From ecc96077aaab2a8b1950367e796dbd394a09ff22 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 16 Apr 2019 22:27:28 +0200 Subject: [PATCH 0001/1003] Update README.md after new release --- README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c7e6fc008..cc33606ce 100644 --- a/README.md +++ b/README.md @@ -9,19 +9,20 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of VLog4j is version 0.2.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of VLog4j is version 0.3.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` org.semanticweb.vlog4j vlog4j-core - 0.2.0 + 0.3.0 ``` You need to use Java 1.8 or above. Available modules include: * **vlog4j-core**: essential data models for rules and facts, and essential reasoner functionality +* **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files * **vlog4j-rdf**: support for reading from RDF files * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API From 3b6c690e1f220a5742e9b371b739573bb5e76f8a Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Wed, 17 Apr 2019 10:00:02 +0200 Subject: [PATCH 0002/1003] configure failOnError false for javadoc-jars and aggregate-javadoc-jar --- pom.xml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8e4d666cd..a1b6b82b5 100644 --- a/pom.xml +++ b/pom.xml @@ -1,5 +1,6 @@ - 4.0.0 @@ -289,12 +290,18 @@ jar + + false + aggregate-javadoc-jar aggregate-jar + + false + From 0b002e17461bda1b6e266ce32147b9f1a4dbbf98 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Wed, 17 Apr 2019 11:31:42 +0200 Subject: [PATCH 0003/1003] update version to 0.4.0-SNAPSHOT --- pom.xml | 2 +- vlog4j-core/pom.xml | 2 +- vlog4j-examples/pom.xml | 2 +- vlog4j-graal/pom.xml | 2 +- vlog4j-owlapi/pom.xml | 2 +- vlog4j-rdf/pom.xml | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 2bc70aeab..f15a780ec 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.3.0 + 0.4.0-SNAPSHOT pom VLog4j diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index 1d31abbd0..d2d7d3a8c 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.3.0 + 0.4.0-SNAPSHOT vlog4j-core diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index 23bbd6ca6..fac638a4a 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.3.0 + 0.4.0-SNAPSHOT vlog4j-examples diff --git a/vlog4j-graal/pom.xml b/vlog4j-graal/pom.xml index 9843b4258..f80fcac66 100644 --- a/vlog4j-graal/pom.xml +++ b/vlog4j-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.3.0 + 0.4.0-SNAPSHOT vlog4j-graal diff --git a/vlog4j-owlapi/pom.xml b/vlog4j-owlapi/pom.xml index 19ff8b2f6..27300c51b 100644 --- a/vlog4j-owlapi/pom.xml +++ b/vlog4j-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.3.0 + 0.4.0-SNAPSHOT vlog4j-owlapi diff --git a/vlog4j-rdf/pom.xml b/vlog4j-rdf/pom.xml index 
d8cfdb91a..c3044281b 100644 --- a/vlog4j-rdf/pom.xml +++ b/vlog4j-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.3.0 + 0.4.0-SNAPSHOT vlog4j-rdf From 919d432a18805ea59fd1b92d69473dcca58c68cc Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 7 May 2019 16:14:47 +0200 Subject: [PATCH 0004/1003] fixed typo in javadoc --- .../main/java/org/semanticweb/vlog4j/examples/DoidExample.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index ea5ae1843..0fbfaa20b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -47,7 +47,7 @@ /** * This example reasons about human diseases, based on information from the * Disease Ontology (DOID) and Wikidata. It illustrates how to load data from - * different sources (RDF file, SPARQL), and reason about these input using + * different sources (RDF file, SPARQL), and reason about these inputs using * rules that are loaded from a file. The rules used here employ existential * quantifiers and stratified negation. * From 5cfe9a53476956a386219813617fd9f5207bda9c Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 7 May 2019 16:33:49 +0200 Subject: [PATCH 0005/1003] added ReasoningState enum with: INCOMPLETE, WRONG, COMPLETE --- .../vlog4j/core/reasoner/KnowledgeBase.java | 25 ++++++++++ .../vlog4j/core/reasoner/ReasoningState.java | 46 +++++++++++++++++++ 2 files changed, 71 insertions(+) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasoningState.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java new file mode 100644 index 000000000..1676e32de --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -0,0 +1,25 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public class KnowledgeBase { + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasoningState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasoningState.java new file mode 100644 index 000000000..c71cd78f7 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasoningState.java @@ -0,0 +1,46 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Enum for different reasoning stages a {@link Reasoner} may be in, with respect to its {@link KnowledgeBase}. + * @author Irina Dragoste + * + */ +public enum ReasoningState { + + //TODO should we have different states for incomplete due to halting, vs incomplete due to adding facts for non-negated rules? + /** + * Reasoning has not completed. Query answering yields correct, but possibly incomplete answers. + */ + INCOMPLETE, + + /** + * Query answering may give incorrect answers. Re-reasoning ({@link Reasoner#reason()}) is required, in order to obtain correct results. + */ + WRONG, + + /** + * Reasoning over current knowledge base is complete, and query answering yields correct and complete results. 
+ */ + COMPLETE + +} From 58262bef5cc59c4ea894011f666937c55cdf8bf8 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 7 May 2019 17:22:28 +0200 Subject: [PATCH 0006/1003] extend Observable and Observer --- .../vlog4j/core/reasoner/KnowledgeBase.java | 4 +++- .../org/semanticweb/vlog4j/core/reasoner/Reasoner.java | 3 ++- .../core/reasoner/implementation/VLogReasoner.java | 10 ++++++++++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 1676e32de..2b2cc054b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner; +import java.util.Observable; + /*- * #%L * VLog4j Core Components @@ -20,6 +22,6 @@ * #L% */ -public class KnowledgeBase { +public class KnowledgeBase extends Observable { } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 895f9687b..2e0ef9e5d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -3,6 +3,7 @@ import java.io.IOException; import java.util.Collection; import java.util.List; +import java.util.Observer; import org.eclipse.jdt.annotation.NonNull; import org.eclipse.jdt.annotation.Nullable; @@ -90,7 +91,7 @@ * @author Irina Dragoste * */ -public interface Reasoner extends AutoCloseable { +public interface Reasoner extends AutoCloseable, Observer { /** * Factory method that to instantiate a Reasoner. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index cdd2d2690..7769204b0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -10,6 +10,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Observable; import java.util.Set; import org.apache.commons.lang3.Validate; @@ -61,6 +62,7 @@ */ public class VLogReasoner implements Reasoner { + private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); private final VLog vLog = new VLog(); @@ -126,6 +128,7 @@ public void addRules(final List rules) throws ReasonerStateException { LOGGER.warn("Adding rules to a closed reasoner."); } + @Override public List getRules() { return Collections.unmodifiableList(this.rules); } @@ -500,4 +503,11 @@ public CyclicityResult checkForCycles() throws ReasonerStateException, NotStarte } } + @Override + public void update(Observable o, Object arg) { + // TODO update reasoning state for query answering + // TODO compute KB diff + + } + } From 31d5278b660fae9fe57e71dac2ea458135613d32 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 7 May 2019 17:55:46 +0200 Subject: [PATCH 0007/1003] add a KnowledgeBase to a Reasoner in the Reasoner constructor. 
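PATCH 0006 above wires change propagation through the classic java.util Observer pattern: KnowledgeBase now extends Observable, the Reasoner interface extends Observer, and VLogReasoner gains an update(Observable, Object) callback that is still a TODO. The sketch below only illustrates how that mechanism behaves once a knowledge base actually notifies its observers; KnowledgeBaseSketch, its addFact method, and the logging body of update() are hypothetical placeholders, not part of the patch.

```java
import java.util.Observable;
import java.util.Observer;

// Hypothetical stand-in for KnowledgeBase: the real class is still empty and
// does not yet call setChanged()/notifyObservers() anywhere.
class KnowledgeBaseSketch extends Observable {
	void addFact(final String fact) {
		// ... store the fact ...
		setChanged();          // mark this Observable as modified
		notifyObservers(fact); // triggers update() on all registered observers
	}
}

// Hypothetical stand-in for VLogReasoner: registering in the constructor and
// the update() signature mirror the patch; the method body is illustrative.
class ReasonerSketch implements Observer {
	ReasonerSketch(final KnowledgeBaseSketch knowledgeBase) {
		knowledgeBase.addObserver(this);
	}

	@Override
	public void update(final Observable observable, final Object arg) {
		System.out.println("Knowledge base changed: " + arg);
	}
}

class ObserverWiringDemo {
	public static void main(final String[] args) {
		final KnowledgeBaseSketch knowledgeBase = new KnowledgeBaseSketch();
		new ReasonerSketch(knowledgeBase);
		knowledgeBase.addFact("p(c)"); // prints "Knowledge base changed: p(c)"
	}
}
```

As the TODOs in VLogReasoner.update() suggest, the intended use is to track how the knowledge base diverges from the last materialization, so that query answers can be flagged via the ReasoningState values (INCOMPLETE, WRONG, COMPLETE) introduced in PATCH 0005.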
* Register Reasoner as observer to the KnowledgeBase * Unregister Reasoner observer from the KnowledgeBase at Reasoner.close() --- .../vlog4j/core/reasoner/Reasoner.java | 7 ++-- .../reasoner/implementation/VLogReasoner.java | 21 +++++++--- .../vlog4j/core/reasoner/LoggingTest.java | 12 +++--- .../core/reasoner/ReasonerTimeoutTest.java | 2 +- .../implementation/AddDataSourceTest.java | 17 ++++---- .../implementation/AnswerQueryTest.java | 15 +++---- .../ExportQueryAnswersToCsvFileTest.java | 5 ++- .../FileDataSourceTestUtils.java | 3 +- .../GeneratedAnonymousIndividualsTest.java | 9 +++-- .../LoadDataFromCsvFileTest.java | 7 ++-- .../LoadDataFromMemoryTest.java | 9 +++-- .../LoadDataFromRdfFileTest.java | 7 ++-- .../LoadDataFromSparqlQueryTest.java | 13 ++++--- .../implementation/ReasonerStateTest.java | 39 ++++++++++--------- .../reasoner/implementation/ReasonerTest.java | 11 +++--- .../StratifiedNegationTest.java | 9 +++-- .../vlog4j/examples/DoidExample.java | 3 +- .../examples/core/AddDataFromCsvFile.java | 3 +- .../examples/core/AddDataFromRdfFile.java | 3 +- .../core/AddDataFromSparqlQueryResults.java | 3 +- .../core/ConfigureReasonerLogging.java | 3 +- .../SkolemVsRestrictedChaseTermination.java | 3 +- .../examples/graal/AddDataFromDlgpFile.java | 3 +- .../examples/graal/AddDataFromGraal.java | 3 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 3 +- .../examples/rdf/AddDataFromRdfModel.java | 3 +- .../vlog4j/rdf/TestReasonOverRdfFacts.java | 5 ++- 27 files changed, 128 insertions(+), 93 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 2e0ef9e5d..6ed22fc48 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -94,12 +94,13 @@ public interface Reasoner extends AutoCloseable, Observer { /** - * Factory method that to instantiate a Reasoner. + * Factory method to instantiate a Reasoner with the given knowledge base. + * @param knowledgeBase Knowledge Base containing data associated with the reasoner * * @return a {@link VLogReasoner} instance. 
*/ - public static Reasoner getInstance() { - return new VLogReasoner(); + public static Reasoner getInstance(KnowledgeBase knowledgeBase) { + return new VLogReasoner(knowledgeBase); } /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 7769204b0..ccef08523 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -23,6 +23,7 @@ import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -62,9 +63,11 @@ */ public class VLogReasoner implements Reasoner { - + private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); + private final KnowledgeBase knowledgeBase; + private final VLog vLog = new VLog(); private ReasonerState reasonerState = ReasonerState.BEFORE_LOADING; @@ -77,6 +80,12 @@ public class VLogReasoner implements Reasoner { private final Map> factsForPredicate = new HashMap<>(); private final Map dataSourceForPredicate = new HashMap<>(); + public VLogReasoner(KnowledgeBase knowledgeBase) { + super(); + this.knowledgeBase = knowledgeBase; + this.knowledgeBase.addObserver(this); + } + /** * Holds the state of the reasoning result. Has value {@code true} if reasoning * has completed, {@code false} if it has been interrupted. 
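Every call site in the tests and examples changed by this patch switches from Reasoner.getInstance() to Reasoner.getInstance(new KnowledgeBase()). A minimal sketch of the new instantiation pattern, assuming only the snapshot API shown in this patch (the comments name methods such as addFacts and load that already exist on the reasoner):

```java
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;

public class ReasonerInstantiationSketch {
	public static void main(final String[] args) throws Exception {
		// The reasoner is now created for a given knowledge base and registers
		// itself as its observer; Reasoner.close() unregisters it again.
		final KnowledgeBase knowledgeBase = new KnowledgeBase();
		try (final Reasoner reasoner = Reasoner.getInstance(knowledgeBase)) {
			// At this stage facts and rules are still added via the reasoner,
			// e.g. reasoner.addFacts(...) and reasoner.addRules(...), followed
			// by reasoner.load(), reasoner.reason() and reasoner.answerQuery(...).
		}
	}
}
```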
@@ -130,7 +139,7 @@ public void addRules(final List rules) throws ReasonerStateException { @Override public List getRules() { - return Collections.unmodifiableList(this.rules); + return Collections.unmodifiableList(this.rules); } @Override @@ -339,7 +348,7 @@ public void exportQueryAnswersToCsv(final PositiveLiteral query, final String cs @Override public void resetReasoner() throws ReasonerStateException { - if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) + if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) throw new ReasonerStateException(reasonerState, "Resetting is not allowed after closing."); this.reasonerState = ReasonerState.BEFORE_LOADING; this.vLog.stop(); @@ -349,7 +358,9 @@ public void resetReasoner() throws ReasonerStateException { @Override public void close() { - reasonerState = ReasonerState.AFTER_CLOSING; + this.reasonerState = ReasonerState.AFTER_CLOSING; + + this.knowledgeBase.deleteObserver(this); this.vLog.stop(); } @@ -507,7 +518,7 @@ public CyclicityResult checkForCycles() throws ReasonerStateException, NotStarte public void update(Observable o, Object arg) { // TODO update reasoning state for query answering // TODO compute KB diff - + } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 31fccec7c..e1c1a555e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -59,7 +59,7 @@ public class LoggingTest { @Test public void testSetLogFileNull() throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - try (final Reasoner instance = Reasoner.getInstance()) { + try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { instance.setLogFile(null); instance.setLogLevel(LogLevel.INFO); @@ -75,7 +75,7 @@ public void testSetLogFileNull() throws ReasonerStateException, IOException, Edb public void testSetLogFileInexistent() throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { final String inexistentFilePath = LOGS_FOLDER + "a/b"; - try (final Reasoner instance = Reasoner.getInstance()) { + try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { instance.setLogFile(inexistentFilePath); assertFalse(new File(inexistentFilePath).exists()); instance.setLogLevel(LogLevel.INFO); @@ -91,7 +91,7 @@ public void testSetLogFileInexistent() throws ReasonerStateException, IOExceptio @Test(expected = NullPointerException.class) public void testSetLogLevelNull() throws ReasonerStateException { - try (final Reasoner instance = Reasoner.getInstance()) { + try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { instance.setLogLevel(null); } } @@ -102,7 +102,7 @@ public void testSetLogFileAppendsToFile() throws EdbIdbSeparationException, IOEx assertFalse(new File(logFilePath).exists()); int countLinesBeforeReset = 0; - try (final Reasoner instance = Reasoner.getInstance()) { + try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { instance.addFacts(factPc); instance.addRules(rule); instance.setLogLevel(LogLevel.INFO); @@ -128,7 +128,7 @@ public void testLogLevelInfo() throws ReasonerStateException, EdbIdbSeparationEx final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelInfo.log"; assertFalse(new 
File(logFilePath).exists()); - try (final Reasoner instance = Reasoner.getInstance()) { + try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { instance.addFacts(factPc); instance.addRules(rule); @@ -149,7 +149,7 @@ public void testLogLevelDebug() throws ReasonerStateException, EdbIdbSeparationE final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDebug.log"; assertFalse(new File(logFilePath).exists()); - try (final Reasoner instance = Reasoner.getInstance()) { + try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { instance.addFacts(factPc); instance.addRules(rule); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index fd982f3c6..3c8158b9b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -108,7 +108,7 @@ public static void setUpBeforeClass() { @Before public void setUp() throws ReasonerStateException { - reasoner = Reasoner.getInstance(); + reasoner = Reasoner.getInstance(new KnowledgeBase()); reasoner.addFacts(facts); reasoner.addRules(rules); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 9bf1a7816..d804f31dc 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -34,6 +34,7 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; @@ -54,7 +55,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws Reasone final Predicate predicateLArity1 = Expressions.makePredicate("l", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFacts(factPredicatePArity2, factPredicateQArity1); reasoner.addFactsFromDataSource(predicateLArity1, dataSource); reasoner.addFactsFromDataSource(predicateParity1, dataSource); @@ -78,7 +79,7 @@ public void testAddDataSourceBeforeLoading() throws ReasonerStateException, EdbI final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFactsFromDataSource(predicateP, dataSource); reasoner.addFactsFromDataSource(predicateQ, dataSource); reasoner.load(); @@ -91,7 +92,7 @@ public void testAddDataSourceAfterLoading() throws ReasonerStateException, EdbId final 
Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFactsFromDataSource(predicateP, dataSource); reasoner.load(); reasoner.addFactsFromDataSource(predicateQ, dataSource); @@ -104,7 +105,7 @@ public void testAddDataSourceAfterReasoning() throws ReasonerStateException, Edb final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFactsFromDataSource(predicateP, dataSource); reasoner.load(); reasoner.reason(); @@ -116,7 +117,7 @@ public void testAddDataSourceAfterReasoning() throws ReasonerStateException, Edb public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws ReasonerStateException, IOException { final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFactsFromDataSource(predicate, dataSource); reasoner.addFactsFromDataSource(predicate, dataSource); } @@ -127,7 +128,7 @@ public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); final PositiveLiteral fact = Expressions.makePositiveLiteral(Expressions.makePredicate("p", 1), Expressions.makeConstant("a")); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFacts(fact); reasoner.addFactsFromDataSource(predicate, dataSource); } @@ -136,7 +137,7 @@ public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException @Test(expected = NullPointerException.class) public void testAddDataSourcePredicateNotNull() throws ReasonerStateException, IOException { final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFactsFromDataSource(null, dataSource); } } @@ -144,7 +145,7 @@ public void testAddDataSourcePredicateNotNull() throws ReasonerStateException, I @Test(expected = NullPointerException.class) public void testAddDataSourceNotNullDataSource() throws ReasonerStateException { final Predicate predicate = Expressions.makePredicate("p", 1); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFactsFromDataSource(predicate, null); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index 5b749138f..c9f02872d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -43,6 +43,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; @@ -68,7 +69,7 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() @SuppressWarnings("unchecked") final Set> factCCD = Sets.newSet(Arrays.asList(constantC, constantC, constantD)); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFacts(fact); reasoner.load(); @@ -110,7 +111,7 @@ public void testIDBQuerySameBlankSubstitutesSameVariableName() Expressions.makeConjunction(Expressions.makePositiveLiteral(predicate, x))); assertEquals(Sets.newSet(y, z), pX__pYY_pYZ.getExistentiallyQuantifiedVariables()); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); reasoner.addFacts(Expressions.makePositiveLiteral(predicate, Expressions.makeConstant("c"))); @@ -152,7 +153,7 @@ public void testIDBQuerySameIndividualSubstitutesSameVariableName() final Constant constantD = Expressions.makeConstant("d"); final PositiveLiteral factPcd = Expressions.makePositiveLiteral(predicate, constantC, constantD); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFacts(factPcd); reasoner.addRules(pXY__pXYYZZT); reasoner.load(); @@ -217,7 +218,7 @@ public void queryResultWithBlanks() throws ReasonerStateException, EdbIdbSeparat final PositiveLiteral fact = Expressions.makePositiveLiteral("p", constantC); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("q", Expressions.makeVariable("?x")); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFacts(fact); reasoner.addRules(existentialRule); reasoner.load(); @@ -240,7 +241,7 @@ public void queryResultWithBlanks() throws ReasonerStateException, EdbIdbSeparat @Test public void queryEmptyKnowledgeBase() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.load(); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); @@ -258,7 +259,7 @@ public void queryEmptyKnowledgeBase() throws IOException, EdbIdbSeparationExcept @Test public void queryEmptyRules() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { final PositiveLiteral fact = Expressions.makePositiveLiteral("P", Expressions.makeConstant("c")); reasoner.addFacts(fact); reasoner.load(); @@ -282,7 +283,7 @@ public 
void queryEmptyFacts() final Variable vx = Expressions.makeVariable("x"); final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addRules(rule); reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java index 14a1d9fea..3adabb19a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java @@ -32,6 +32,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -54,7 +55,7 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() // final String csvFilePath = CSV_EXPORT_FOLDER + "output"; final List> factCCD = Arrays.asList(Arrays.asList("c", "c", "d")); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFacts(fact); reasoner.load(); @@ -90,7 +91,7 @@ public void testExportQueryEmptyKnowledgeBase() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.load(); final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, true); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java index 64cbfa048..9501476d1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -36,6 +36,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -103,7 +104,7 @@ public static void testConstructor(final FileDataSource fileDataSource, final Fi public static void testLoadEmptyFile(final Predicate predicate, final PositiveLiteral 
queryAtom, final FileDataSource emptyFileDataSource) throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(predicate, emptyFileDataSource); reasoner.load(); reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 7e6d2eec6..db2693726 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; @@ -70,7 +71,7 @@ public class GeneratedAnonymousIndividualsTest { @Test public void testBlanksSkolemChaseNoRuleRewrite() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); @@ -87,7 +88,7 @@ public void testBlanksSkolemChaseNoRuleRewrite() @Test public void testBlanksSkolemChaseSplitHeadPieces() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); // P(?x) -> P(?x,!y), P(?x,!z) // after split becomes {{P(?x) -> P(?x,!y), {P(?x)-> P(?x,!z)}} @@ -106,7 +107,7 @@ public void testBlanksSkolemChaseSplitHeadPieces() @Test public void testBlanksRestrictedChaseNoRuleRewrite() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); @@ -124,7 +125,7 @@ public void testBlanksRestrictedChaseNoRuleRewrite() public void testBlanksRestrictedChaseSplitHeadPieces() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); // {P(?x) -> P(?x,!y), P(?x,!z)} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java index a69e8cb57..c3ef3ba44 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java @@ -37,6 +37,7 @@ import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -75,7 +76,7 @@ public void testLoadUnaryFactsFromCsvFile() throws ReasonerStateException, EdbId private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(unaryPredicate1, fileDataSource); reasoner.addFactsFromDataSource(unaryPredicate2, fileDataSource); reasoner.load(); @@ -108,7 +109,7 @@ public void testLoadNonexistingCsvFile() assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new CsvFileDataSource(nonexistingFile); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(unaryPredicate1, fileDataSource); reasoner.load(); } @@ -120,7 +121,7 @@ public void testLoadCsvFileWrongArity() final FileDataSource fileDataSource = new CsvFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv")); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(unaryPredicate1, fileDataSource); reasoner.load(); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java index b37e6a9e8..5d14185df 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; @@ -44,7 +45,7 @@ public void loadEdbIdbNotSeparated() final PositiveLiteral factIDBpredQ1 = Expressions.makePositiveLiteral("q", Expressions.makeConstant("c")); final PositiveLiteral factEDBpredQ2 = 
Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), Expressions.makeConstant("d")); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addRules(rule); reasoner.addFacts(factIDBpredQ1, factEDBpredQ2); reasoner.load(); @@ -59,7 +60,7 @@ public void loadEdbIdbSeparated() final PositiveLiteral factEDBpred = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), Expressions.makeConstant("d")); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addRules(rule); reasoner.addFacts(factEDBpred); reasoner.load(); @@ -71,7 +72,7 @@ public void addFactsWithVariableTerms() throws ReasonerStateException { final PositiveLiteral factWithVariableTerms = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), Expressions.makeVariable("x")); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFacts(factWithVariableTerms); } } @@ -80,7 +81,7 @@ public void addFactsWithVariableTerms() throws ReasonerStateException { public void addFactsWithBlankTerms() throws ReasonerStateException { final PositiveLiteral factWithBlankTerms = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), new BlankImpl("b")); - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFacts(factWithBlankTerms); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java index 435329e0b..021ca1c78 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java @@ -37,6 +37,7 @@ import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -76,7 +77,7 @@ public void testLoadTernaryFactsFromRdfFile() throws ReasonerStateException, Edb public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fileDataSource) throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(ternaryPredicate, fileDataSource); reasoner.load(); @@ -94,7 +95,7 @@ public void testLoadNonexistingRdfFile() assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(ternaryPredicate, fileDataSource); reasoner.load(); } @@ -106,7 +107,7 
@@ public void testLoadRdfInvalidFormat() final FileDataSource fileDataSource = new RdfFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt")); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(ternaryPredicate, fileDataSource); reasoner.load(); FileDataSourceTestUtils.testNoFactsOverPredicate(reasoner, queryAtom); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java index 77acf6410..8e74622ab 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java @@ -34,6 +34,7 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -61,7 +62,7 @@ public void testSimpleSparqlQuery() "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, @@ -86,7 +87,7 @@ public void testSimpleSparqlQueryHttps() "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, @@ -119,7 +120,7 @@ public void testSimpleSparqlQuery2() "?a wdt:P22 ?b ."); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, @@ -142,7 +143,7 @@ public void testConjunctiveQueryNewLineCharacterInQueryBody() "?b wdt:P22 ?a .\n" + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(haveChildrenTogether, dataSource); reasoner.load(); reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, Expressions.makeVariable("x"), @@ -162,7 +163,7 @@ public void 
testConjunctiveQuery() "?b wdt:P22 ?a ." + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFactsFromDataSource(haveChildrenTogether, dataSource); reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, @@ -184,7 +185,7 @@ public void testDataSourcePredicateDoesNotMatchSparqlQueryTerms() // b has father a and b has mother c "?b wdt:P22 ?a ." + "?b wdt:P25 ?c"); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { // TODO must validate predicate arity sonner reasoner.addFactsFromDataSource(Expressions.makePredicate("ternary", 3), dataSource); reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 8b7cd4d2e..6df89d531 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -41,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; @@ -64,21 +65,21 @@ public class ReasonerStateTest { @Test(expected = NullPointerException.class) public void testSetAlgorithm() { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { reasoner.setAlgorithm(null); } } @Test(expected = IllegalArgumentException.class) public void testSetReasoningTimeout() { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { reasoner.setReasoningTimeout(-3); } } @Test(expected = ReasonerStateException.class) public void testAddRules1() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { reasoner.load(); reasoner.addRules(ruleQxPx); } @@ -86,7 +87,7 @@ public void testAddRules1() throws EdbIdbSeparationException, IOException, Reaso @Test public void testAddRules2() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { reasoner.load(); reasoner.resetReasoner(); reasoner.addRules(ruleQxPx); @@ -95,7 +96,7 @@ public void testAddRules2() throws EdbIdbSeparationException, IOException, Reaso @Test(expected = IllegalArgumentException.class) public void testAddRules3() throws EdbIdbSeparationException, IOException, ReasonerStateException { - try (final Reasoner reasoner = 
Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { final List rules = new ArrayList<>(); rules.add(ruleQxPx); rules.add(null); @@ -105,7 +106,7 @@ public void testAddRules3() throws EdbIdbSeparationException, IOException, Reaso @Test(expected = ReasonerStateException.class) public void testAddFacts1() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { reasoner.load(); reasoner.addFacts(factPc); } @@ -113,7 +114,7 @@ public void testAddFacts1() throws EdbIdbSeparationException, IOException, Reaso @Test(expected = IllegalArgumentException.class) public void testAddFacts2() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { final List facts = new ArrayList<>(); facts.add(factPc); facts.add(null); @@ -124,21 +125,21 @@ public void testAddFacts2() throws EdbIdbSeparationException, IOException, Reaso @Test public void testResetBeforeLoad() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.resetReasoner(); } } @Test(expected = NullPointerException.class) public void setRuleRewriteStrategy1() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { reasoner.setRuleRewriteStrategy(null); } } @Test(expected = ReasonerStateException.class) public void setRuleRewriteStrategy2() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { reasoner.load(); reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); } @@ -146,7 +147,7 @@ public void setRuleRewriteStrategy2() throws ReasonerStateException, EdbIdbSepar @Test public void setRuleRewriteStrategy3() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { reasoner.load(); reasoner.resetReasoner(); reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); @@ -157,7 +158,7 @@ public void setRuleRewriteStrategy3() throws ReasonerStateException, EdbIdbSepar public void testResetDiscardInferences() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { for (final Algorithm algorithm : Algorithm.values()) { // discard inferences regardless of the inference algorithm - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { reasoner.addFacts(factPc); reasoner.addRules(ruleQxPx); reasoner.setAlgorithm(algorithm); @@ -189,7 +190,7 @@ public void testResetDiscardInferences() throws ReasonerStateException, EdbIdbSe @Test public void testResetKeepExplicitDatabase() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - 
try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { // assert p(c) reasoner.addFacts(factPc); // assert r(d) @@ -233,7 +234,7 @@ private void checkExplicitFacts(final Reasoner reasoner, final Predicate predica @Test public void testResetEmptyKnowledgeBase() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - final Reasoner reasoner = Reasoner.getInstance(); + final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase()); // 1. load and reason reasoner.load(); try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { @@ -266,28 +267,28 @@ public void testResetEmptyKnowledgeBase() throws EdbIdbSeparationException, IOEx @Test(expected = ReasonerStateException.class) public void testFailReasonBeforeLoad() throws ReasonerStateException, IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.reason(); } } @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryBeforeLoad() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.answerQuery(exampleQueryAtom, true); } } @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswerToCsvBeforeLoad() throws ReasonerStateException, IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); } } @Test public void testSuccessiveCloseAfterLoad() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.load(); reasoner.close(); reasoner.close(); @@ -296,7 +297,7 @@ public void testSuccessiveCloseAfterLoad() throws EdbIdbSeparationException, IOE @Test public void testSuccessiveCloseBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.close(); reasoner.close(); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java index 993bf8619..c95796f98 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java @@ -37,6 +37,7 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; @@ -62,11 +63,11 @@ public class ReasonerTest { @Test public void testCloseRepeatedly() throws 
EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.close(); } - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.load(); reasoner.close(); reasoner.close(); @@ -76,7 +77,7 @@ public void testCloseRepeatedly() @Test public void testLoadRules() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addRules(ruleBxAx, ruleCxBx); reasoner.addRules(ruleBxAx); assertEquals(reasoner.getRules(), Arrays.asList(ruleBxAx, ruleCxBx, ruleBxAx)); @@ -87,7 +88,7 @@ public void testLoadRules() public void testSimpleInference() throws EDBConfigurationException, IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.addFacts(factAc, factAd); reasoner.addRules(ruleBxAx, ruleCxBx); reasoner.load(); @@ -110,7 +111,7 @@ public void testSimpleInference() throws EDBConfigurationException, IOException, @Test public void testGenerateDataSourcesConfigEmpty() throws ReasonerStateException, IOException { - try (final VLogReasoner reasoner = new VLogReasoner()) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { final String dataSourcesConfig = reasoner.generateDataSourcesConfig(); assertTrue(dataSourcesConfig.isEmpty()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java index d4cb1da1c..2e0362a38 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java @@ -39,6 +39,7 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -60,7 +61,7 @@ public void testNotStratifiableEdbIdbSeparation() final Rule rule = makeRule(qXY, pXY, notQXY); final PositiveLiteral fact = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addRules(rule); reasoner.addFacts(fact); @@ -82,7 +83,7 @@ public void testNotStratifiable() final Rule rule = makeRule(qXY, pXY, notQXY); final PositiveLiteral fact = makePositiveLiteral("P", makeConstant("c"), makeConstant("d")); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addRules(rule); reasoner.addFacts(fact); @@ 
-111,7 +112,7 @@ public void testStratifiable() final PositiveLiteral qCD = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addRules(rule); reasoner.addFacts(pCD, pEF, qCD); @@ -146,7 +147,7 @@ public void testInputNegation() final PositiveLiteral qCD = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addRules(rule); reasoner.addFacts(pCD, pEF, qCD); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 0fbfaa20b..f8d9d0fef 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -33,6 +33,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -63,7 +64,7 @@ public static void main(final String[] args) final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { /* Configure RDF data source */ final Predicate doidTriplePredicate = makePredicate("doidTriple", 3); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index e52302d3d..cd38ee150 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -38,6 +38,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -132,7 +133,7 @@ public static void main(final String[] args) * 2. Loading, reasoning, and querying while using try-with-resources to close * the reasoner automatically. */ - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); /* Importing {@code .csv} files as data sources. 
*/ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 315e72e18..cb70270f0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -38,6 +38,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -132,7 +133,7 @@ public static void main(final String[] args) * 2. Loading, reasoning, querying and exporting, while using try-with-resources * to close the reasoner automatically. */ - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addRules(rule1, rule2, rule3, rule4, rule5); /* Importing {@code .nt.gz} file as data source. */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 8f1c38270..59a6256c1 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -34,6 +34,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -120,7 +121,7 @@ public static void main(final String[] args) final Predicate titleOfPublicationThatHasAuthorsWhoParentTheSameChild = Expressions .makePredicate("publicationAndAuthorsWhoParentTheSameChild", 3); - try (Reasoner reasoner = Reasoner.getInstance()) { + try (Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { /* * The SPARQL query results will be added to the reasoner knowledge base, as diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java index 3abf42d8a..1bf960fb4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java @@ -36,6 +36,7 @@ import org.eclipse.jdt.annotation.Nullable; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import 
org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; @@ -95,7 +96,7 @@ public class ConfigureReasonerLogging { public static void main(final String[] args) throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addRules(rules); reasoner.addFacts(fact); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 2f4280fed..6b0956ad4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -29,6 +29,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -121,7 +122,7 @@ public static void main(final String[] args) * 2. Loading, reasoning, and querying. Use try-with resources, or remember to * call close() to free the reasoner resources. */ - try (Reasoner reasoner = Reasoner.getInstance()) { + try (Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); reasoner.addFacts(fact1, fact2, fact3, fact4); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java index 44d599d04..df8a884dc 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java @@ -27,6 +27,7 @@ import java.util.ArrayList; import java.util.List; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -103,7 +104,7 @@ public static void main(final String[] args) * 3. Loading, reasoning, and querying while using try-with-resources to close * the reasoner automatically. 
*/ - try (Reasoner reasoner = Reasoner.getInstance()) { + try (Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); reasoner.addFacts(GraalToVLog4JModelConverter.convertAtoms(graalAtoms)); for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index 90abb2b3b..be80b72f8 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -25,6 +25,7 @@ import java.util.List; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -123,7 +124,7 @@ public static void main(final String[] args) * 4. Loading, reasoning, and querying while using try-with-resources to close * the reasoner automatically. */ - try (Reasoner reasoner = Reasoner.getInstance()) { + try (Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); reasoner.addRules(convertedGraalConjunctiveQuery.getRule()); reasoner.addFacts(GraalToVLog4JModelConverter.convertAtoms(graalAtoms)); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 526ea2578..742317f17 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -84,7 +85,7 @@ public static void main(final String[] args) throws OWLOntologyCreationException } System.out.println(); - try (Reasoner reasoner = Reasoner.getInstance()) { + try (Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { /* Load rules and facts obtained from the ontology */ reasoner.addRules(new ArrayList<>(owlToRulesConverter.getRules())); reasoner.addFacts(owlToRulesConverter.getFacts()); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index 032e7af18..e72533712 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -44,6 +44,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import 
org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -145,7 +146,7 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti final Rule organizationRule = Expressions.makeRule(creatorOrganizationName, personHasAffiliation, affiliationWithOrganization, organizationHasName); - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { /* * Facts extracted from the RDF resources are added to the Reasoner's knowledge * base. diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index 9ec1b5f70..57665eeab 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -44,6 +44,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -69,7 +70,7 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToPositiveLiterals(model); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFacts(facts); reasoner.load(); @@ -87,7 +88,7 @@ public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandle RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToPositiveLiterals(model); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { reasoner.addFacts(facts); reasoner.load(); From 6d754f8eb7e93a7846f26e8b1a9ba5aadf68e129 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 7 May 2019 17:58:49 +0200 Subject: [PATCH 0008/1003] added TODO --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index ccef08523..8fe86c7c3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -348,6 +348,7 @@ public void exportQueryAnswersToCsv(final PositiveLiteral query, final String cs @Override public void resetReasoner() throws ReasonerStateException { + // TODO what should happen to the KB? 
if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) throw new ReasonerStateException(reasonerState, "Resetting is not allowed after closing."); this.reasonerState = ReasonerState.BEFORE_LOADING; @@ -359,7 +360,7 @@ public void resetReasoner() throws ReasonerStateException { @Override public void close() { this.reasonerState = ReasonerState.AFTER_CLOSING; - + this.knowledgeBase.deleteObserver(this); this.vLog.stop(); } From 0a8199129b73f0e53d36c1bee0034ae47bb6aaf8 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Wed, 8 May 2019 11:45:42 +0200 Subject: [PATCH 0009/1003] Moved addRules and getRules to KB. --- .../vlog4j/core/reasoner/KnowledgeBase.java | 43 ++++++- .../vlog4j/core/reasoner/Reasoner.java | 41 +------ .../implementation/KnowledgeBaseImpl.java | 56 +++++++++ .../reasoner/implementation/VLogReasoner.java | 32 +----- .../vlog4j/core/reasoner/LoggingTest.java | 32 ++++-- .../core/reasoner/ReasonerTimeoutTest.java | 7 +- .../implementation/AddDataSourceTest.java | 39 +++++-- .../implementation/AnswerQueryTest.java | 36 ++++-- .../ExportQueryAnswersToCsvFileTest.java | 9 +- .../FileDataSourceTestUtils.java | 5 +- .../GeneratedAnonymousIndividualsTest.java | 23 ++-- .../LoadDataFromCsvFileTest.java | 10 +- .../LoadDataFromMemoryTest.java | 17 ++- .../LoadDataFromRdfFileTest.java | 10 +- .../LoadDataFromSparqlQueryTest.java | 18 ++- .../implementation/ReasonerStateTest.java | 107 +++++++++++------- .../reasoner/implementation/ReasonerTest.java | 21 ++-- .../StratifiedNegationTest.java | 24 ++-- .../vlog4j/examples/DoidExample.java | 61 +++++----- .../examples/core/AddDataFromCsvFile.java | 7 +- .../examples/core/AddDataFromRdfFile.java | 7 +- .../core/AddDataFromSparqlQueryResults.java | 8 +- .../core/ConfigureReasonerLogging.java | 7 +- .../SkolemVsRestrictedChaseTermination.java | 7 +- .../examples/graal/AddDataFromDlgpFile.java | 13 ++- .../examples/graal/AddDataFromGraal.java | 9 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 7 +- .../examples/rdf/AddDataFromRdfModel.java | 15 ++- .../vlog4j/rdf/TestReasonOverRdfFacts.java | 9 +- 29 files changed, 433 insertions(+), 247 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/KnowledgeBaseImpl.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 2b2cc054b..981ecf4f7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -1,7 +1,12 @@ package org.semanticweb.vlog4j.core.reasoner; +import java.util.List; import java.util.Observable; +import org.eclipse.jdt.annotation.NonNull; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.TermType; + /*- * #%L * VLog4j Core Components @@ -22,6 +27,42 @@ * #L% */ -public class KnowledgeBase extends Observable { +public abstract class KnowledgeBase extends Observable { + + /** + * Adds rules to the knowledge base in the given order. The reasoner may + * rewrite the rules internally according to the set + * {@link RuleRewriteStrategy}. + * + * @param rules non-null rules to be added to the knowledge base for + * reasoning. + * @throws IllegalArgumentException if the {@code rules} literals contain terms + * which are not of type + * {@link TermType#CONSTANT} or + * {@link TermType#VARIABLE}. 
+ */ + public abstract void addRules(@NonNull Rule... rules); + + /** + * Adds rules to the knowledge base in the given order. The reasoner may + * rewrite the rules internally according to the set + * {@link RuleRewriteStrategy}. + * + * @param rules non-null rules to be added to the knowledge base for + * reasoning. + * @throws IllegalArgumentException if the {@code rules} literals contain terms + * which are not of type + * {@link TermType#CONSTANT} or + * {@link TermType#VARIABLE}. + */ + public abstract void addRules(@NonNull List rules); + + /** + * Get the list of all rules that have been added to the reasoner. The list is + * read-only and cannot be modified to add or delete rules. + * + * @return list of {@link Rule} + */ + public abstract List getRules(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 6ed22fc48..0c81868a9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -2,7 +2,6 @@ import java.io.IOException; import java.util.Collection; -import java.util.List; import java.util.Observer; import org.eclipse.jdt.annotation.NonNull; @@ -198,45 +197,7 @@ public static Reasoner getInstance(KnowledgeBase knowledgeBase) { */ void setLogFile(@Nullable String filePath) throws ReasonerStateException; - /** - * Adds rules to the reasoner knowledge base in the given order. After - * the reasoner has been loaded ({@link #load()}), the rules may be rewritten - * internally according to the set {@link RuleRewriteStrategy}. - * - * @param rules non-null rules to be added to the knowledge base for - * reasoning. - * @throws ReasonerStateException if the reasoner has already been loaded. - * @throws IllegalArgumentException if the {@code rules} literals contain terms - * which are not of type - * {@link TermType#CONSTANT} or - * {@link TermType#VARIABLE}. - */ - void addRules(@NonNull Rule... rules) throws ReasonerStateException; - - /** - * Adds rules to the reasoner knowledge base in the given order. Rules - * can only be added before loading ({@link #load()}). After the reasoner has - * been loaded, the rules may be rewritten internally according to the set - * {@link RuleRewriteStrategy}. - * - * @param rules non-null rules to be added to the knowledge base for - * reasoning. - * @throws ReasonerStateException if the reasoner has already been loaded. - * @throws IllegalArgumentException if the {@code rules} literals contain terms - * which are not of type - * {@link TermType#CONSTANT} or - * {@link TermType#VARIABLE}. - */ - void addRules(@NonNull List rules) throws ReasonerStateException; - - /** - * Get the list of all rules that have been added to the reasoner. The list is - * read-only and cannot be modified to add or delete rules. - * - * @return list of {@link Rule} - */ - List getRules(); - + /** * Adds non-null facts to the reasoner knowledge base. 
A fact is a * {@link PositiveLiteral} with all terms ({@link PositiveLiteral#getTerms()}) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/KnowledgeBaseImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/KnowledgeBaseImpl.java new file mode 100644 index 000000000..93dd8477b --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/KnowledgeBaseImpl.java @@ -0,0 +1,56 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; + +public class KnowledgeBaseImpl extends KnowledgeBase { + + private final List rules = new ArrayList<>(); + + @Override + public void addRules(final Rule... rules) { + addRules(Arrays.asList(rules)); + } + + @Override + public void addRules(final List rules) { + Validate.noNullElements(rules, "Null rules are not alowed! The list contains a null at position [%d]."); + this.rules.addAll(new ArrayList<>(rules)); + + + // TODO setChanged + // TODO notify listeners with the diff + } + + @Override + public List getRules() { + return Collections.unmodifiableList(this.rules); + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 8fe86c7c3..608c5b4be 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,14 +1,11 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.Formatter; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Map; import java.util.Observable; import java.util.Set; @@ -76,7 +73,6 @@ public class VLogReasoner implements Reasoner { private Integer timeoutAfterSeconds; private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; - private final List rules = new ArrayList<>(); private final Map> factsForPredicate = new HashMap<>(); private final Map dataSourceForPredicate = new HashMap<>(); @@ -120,28 +116,6 @@ public Integer getReasoningTimeout() { return this.timeoutAfterSeconds; } - @Override - public void addRules(final Rule... 
rules) throws ReasonerStateException { - addRules(Arrays.asList(rules)); - } - - @Override - public void addRules(final List rules) throws ReasonerStateException { - if (this.reasonerState != ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, - "Rules cannot be added after the reasoner has been loaded! Call reset() to undo loading and reasoning."); - } - Validate.noNullElements(rules, "Null rules are not alowed! The list contains a null at position [%d]."); - this.rules.addAll(new ArrayList<>(rules)); - if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) - LOGGER.warn("Adding rules to a closed reasoner."); - } - - @Override - public List getRules() { - return Collections.unmodifiableList(this.rules); - } - @Override public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) throws ReasonerStateException { Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); @@ -245,7 +219,7 @@ public void load() loadInMemoryFacts(); - if (this.rules.isEmpty()) { + if (this.knowledgeBase.getRules().isEmpty()) { LOGGER.warn("No rules have been provided for reasoning."); } else { loadRules(); @@ -385,7 +359,7 @@ private Set collectEdbPredicates() { private Set collectIdbPredicates() { final Set idbPredicates = new HashSet<>(); - for (final Rule rule : this.rules) { + for (final Rule rule : this.knowledgeBase.getRules()) { for (final Literal headAtom : rule.getHead()) { idbPredicates.add(headAtom.getPredicate()); } @@ -422,7 +396,7 @@ private void loadInMemoryFacts() { } private void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(this.rules); + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(this.knowledgeBase.getRules()); final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); try { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index e1c1a555e..6b76d1fe7 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -39,6 +39,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; public class LoggingTest { @@ -59,12 +60,14 @@ public class LoggingTest { @Test public void testSetLogFileNull() throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); + + try (final Reasoner instance = Reasoner.getInstance(kb)) { instance.setLogFile(null); instance.setLogLevel(LogLevel.INFO); instance.addFacts(factPc); - instance.addRules(rule); instance.load(); instance.reason(); } @@ -74,14 +77,15 @@ public void testSetLogFileNull() throws ReasonerStateException, IOException, Edb @Test public void testSetLogFileInexistent() throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { final String 
inexistentFilePath = LOGS_FOLDER + "a/b"; + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); - try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner instance = Reasoner.getInstance(kb)) { instance.setLogFile(inexistentFilePath); assertFalse(new File(inexistentFilePath).exists()); instance.setLogLevel(LogLevel.INFO); instance.addFacts(factPc); - instance.addRules(rule); instance.load(); instance.reason(); } @@ -91,7 +95,7 @@ public void testSetLogFileInexistent() throws ReasonerStateException, IOExceptio @Test(expected = NullPointerException.class) public void testSetLogLevelNull() throws ReasonerStateException { - try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBaseImpl())) { instance.setLogLevel(null); } } @@ -102,9 +106,11 @@ public void testSetLogFileAppendsToFile() throws EdbIdbSeparationException, IOEx assertFalse(new File(logFilePath).exists()); int countLinesBeforeReset = 0; - try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); + + try (final Reasoner instance = Reasoner.getInstance(kb)) { instance.addFacts(factPc); - instance.addRules(rule); instance.setLogLevel(LogLevel.INFO); instance.setLogFile(logFilePath); instance.load(); @@ -127,10 +133,12 @@ public void testSetLogFileAppendsToFile() throws EdbIdbSeparationException, IOEx public void testLogLevelInfo() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelInfo.log"; assertFalse(new File(logFilePath).exists()); + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); - try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner instance = Reasoner.getInstance(kb)) { instance.addFacts(factPc); - instance.addRules(rule); instance.setLogLevel(LogLevel.INFO); instance.setLogFile(logFilePath); @@ -149,9 +157,11 @@ public void testLogLevelDebug() throws ReasonerStateException, EdbIdbSeparationE final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDebug.log"; assertFalse(new File(logFilePath).exists()); - try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); + + try (final Reasoner instance = Reasoner.getInstance(kb)) { instance.addFacts(factPc); - instance.addRules(rule); instance.setLogLevel(LogLevel.DEBUG); instance.setLogFile(logFilePath); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index 3c8158b9b..1d7a52ff5 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -43,6 +43,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; /** * Test case ensuring {@link Reasoner#setReasoningTimeout(Integer)} 
works as expected and terminates reasoning after the given {@link #timeout}. @@ -108,10 +109,12 @@ public static void setUpBeforeClass() { @Before public void setUp() throws ReasonerStateException { - reasoner = Reasoner.getInstance(new KnowledgeBase()); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rules); + + reasoner = Reasoner.getInstance(kb); reasoner.addFacts(facts); - reasoner.addRules(rules); } @Test diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index d804f31dc..d193da4ea 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -55,7 +55,9 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws Reasone final Predicate predicateLArity1 = Expressions.makePredicate("l", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFacts(factPredicatePArity2, factPredicateQArity1); reasoner.addFactsFromDataSource(predicateLArity1, dataSource); reasoner.addFactsFromDataSource(predicateParity1, dataSource); @@ -79,7 +81,10 @@ public void testAddDataSourceBeforeLoading() throws ReasonerStateException, EdbI final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFactsFromDataSource(predicateP, dataSource); reasoner.addFactsFromDataSource(predicateQ, dataSource); reasoner.load(); @@ -92,7 +97,10 @@ public void testAddDataSourceAfterLoading() throws ReasonerStateException, EdbId final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFactsFromDataSource(predicateP, dataSource); reasoner.load(); reasoner.addFactsFromDataSource(predicateQ, dataSource); @@ -105,7 +113,10 @@ public void testAddDataSourceAfterReasoning() throws ReasonerStateException, Edb final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFactsFromDataSource(predicateP, dataSource); reasoner.load(); reasoner.reason(); @@ -117,7 +128,10 @@ public void testAddDataSourceAfterReasoning() throws ReasonerStateException, Edb public void testAddDataSourceNoMultipleDataSourcesForPredicate() 
throws ReasonerStateException, IOException { final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFactsFromDataSource(predicate, dataSource); reasoner.addFactsFromDataSource(predicate, dataSource); } @@ -128,7 +142,10 @@ public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); final PositiveLiteral fact = Expressions.makePositiveLiteral(Expressions.makePredicate("p", 1), Expressions.makeConstant("a")); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFacts(fact); reasoner.addFactsFromDataSource(predicate, dataSource); } @@ -137,7 +154,10 @@ public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException @Test(expected = NullPointerException.class) public void testAddDataSourcePredicateNotNull() throws ReasonerStateException, IOException { final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFactsFromDataSource(null, dataSource); } } @@ -145,7 +165,10 @@ public void testAddDataSourcePredicateNotNull() throws ReasonerStateException, I @Test(expected = NullPointerException.class) public void testAddDataSourceNotNullDataSource() throws ReasonerStateException { final Predicate predicate = Expressions.makePredicate("p", 1); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFactsFromDataSource(predicate, null); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index c9f02872d..dfa973107 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -68,8 +68,10 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() final boolean includeBlanks = false; @SuppressWarnings("unchecked") final Set> factCCD = Sets.newSet(Arrays.asList(constantC, constantC, constantD)); + + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(fact); reasoner.load(); @@ -110,12 +112,14 @@ public void testIDBQuerySameBlankSubstitutesSameVariableName() final Rule pX__pYY_pYZ = Expressions.makeRule(Expressions.makePositiveConjunction(pYY, pYZ), Expressions.makeConjunction(Expressions.makePositiveLiteral(predicate, x))); assertEquals(Sets.newSet(y, z), pX__pYY_pYZ.getExistentiallyQuantifiedVariables()); + + final KnowledgeBase kb = new 
KnowledgeBaseImpl(); + kb.addRules(pX__pYY_pYZ); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); reasoner.addFacts(Expressions.makePositiveLiteral(predicate, Expressions.makeConstant("c"))); - reasoner.addRules(pX__pYY_pYZ); reasoner.load(); reasoner.reason(); @@ -152,10 +156,12 @@ public void testIDBQuerySameIndividualSubstitutesSameVariableName() final Constant constantC = Expressions.makeConstant("c"); final Constant constantD = Expressions.makeConstant("d"); final PositiveLiteral factPcd = Expressions.makePositiveLiteral(predicate, constantC, constantD); + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(pXY__pXYYZZT); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(factPcd); - reasoner.addRules(pXY__pXYYZZT); reasoner.load(); reasoner.reason(); @@ -218,9 +224,11 @@ public void queryResultWithBlanks() throws ReasonerStateException, EdbIdbSeparat final PositiveLiteral fact = Expressions.makePositiveLiteral("p", constantC); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("q", Expressions.makeVariable("?x")); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(existentialRule); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFacts(fact); - reasoner.addRules(existentialRule); reasoner.load(); reasoner.reason(); @@ -241,7 +249,9 @@ public void queryResultWithBlanks() throws ReasonerStateException, EdbIdbSeparat @Test public void queryEmptyKnowledgeBase() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); @@ -259,7 +269,9 @@ public void queryEmptyKnowledgeBase() throws IOException, EdbIdbSeparationExcept @Test public void queryEmptyRules() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { final PositiveLiteral fact = Expressions.makePositiveLiteral("P", Expressions.makeConstant("c")); reasoner.addFacts(fact); reasoner.load(); @@ -283,8 +295,10 @@ public void queryEmptyFacts() final Variable vx = Expressions.makeVariable("x"); final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { - reasoner.addRules(rule); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java index 3adabb19a..7fb92f020 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java @@ -54,8 +54,10 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() final boolean includeBlanks = false; // final String csvFilePath = CSV_EXPORT_FOLDER + "output"; final List> factCCD = Arrays.asList(Arrays.asList("c", "c", "d")); + + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(fact); reasoner.load(); @@ -91,7 +93,10 @@ public void testExportQueryEmptyKnowledgeBase() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.load(); final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, true); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java index 9501476d1..f6f147b28 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -104,7 +104,10 @@ public static void testConstructor(final FileDataSource fileDataSource, final Fi public static void testLoadEmptyFile(final Predicate predicate, final PositiveLiteral queryAtom, final FileDataSource emptyFileDataSource) throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(predicate, emptyFileDataSource); reasoner.load(); reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index db2693726..275965fc0 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -71,12 +71,14 @@ public class GeneratedAnonymousIndividualsTest { @Test public void testBlanksSkolemChaseNoRuleRewrite() throws ReasonerStateException, EdbIdbSeparationException, 
IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(existentialRule); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); reasoner.addFacts(fact); - reasoner.addRules(existentialRule); reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); @@ -88,14 +90,16 @@ public void testBlanksSkolemChaseNoRuleRewrite() @Test public void testBlanksSkolemChaseSplitHeadPieces() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(existentialRule); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); // P(?x) -> P(?x,!y), P(?x,!z) // after split becomes {{P(?x) -> P(?x,!y), {P(?x)-> P(?x,!z)}} reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); reasoner.addFacts(fact); - reasoner.addRules(existentialRule); reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); @@ -107,12 +111,14 @@ public void testBlanksSkolemChaseSplitHeadPieces() @Test public void testBlanksRestrictedChaseNoRuleRewrite() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(existentialRule); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); reasoner.addFacts(fact); - reasoner.addRules(existentialRule); reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); @@ -124,15 +130,16 @@ public void testBlanksRestrictedChaseNoRuleRewrite() @Test public void testBlanksRestrictedChaseSplitHeadPieces() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(existentialRule); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); // {P(?x) -> P(?x,!y), P(?x,!z)} // after split becomes {{P(?x) -> P(?x,!y), {P(?x)-> P(?x,!z)}} reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); reasoner.addFacts(fact); - reasoner.addRules(existentialRule); reasoner.load(); reasoner.reason(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java index c3ef3ba44..2dd81d66f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java @@ -76,7 +76,9 @@ public void testLoadUnaryFactsFromCsvFile() throws ReasonerStateException, 
EdbId private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(unaryPredicate1, fileDataSource); reasoner.addFactsFromDataSource(unaryPredicate2, fileDataSource); reasoner.load(); @@ -108,8 +110,9 @@ public void testLoadNonexistingCsvFile() final File nonexistingFile = new File("nonexistingFile.csv"); assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new CsvFileDataSource(nonexistingFile); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(unaryPredicate1, fileDataSource); reasoner.load(); } @@ -120,8 +123,9 @@ public void testLoadCsvFileWrongArity() throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { final FileDataSource fileDataSource = new CsvFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv")); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(unaryPredicate1, fileDataSource); reasoner.load(); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java index 5d14185df..54890d269 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java @@ -45,8 +45,10 @@ public void loadEdbIdbNotSeparated() final PositiveLiteral factIDBpredQ1 = Expressions.makePositiveLiteral("q", Expressions.makeConstant("c")); final PositiveLiteral factEDBpredQ2 = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), Expressions.makeConstant("d")); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { - reasoner.addRules(rule); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFacts(factIDBpredQ1, factEDBpredQ2); reasoner.load(); } @@ -59,9 +61,10 @@ public void loadEdbIdbSeparated() final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); final PositiveLiteral factEDBpred = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), Expressions.makeConstant("d")); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { - reasoner.addRules(rule); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFacts(factEDBpred); reasoner.load(); } @@ -71,8 +74,9 @@ public void loadEdbIdbSeparated() public void addFactsWithVariableTerms() throws ReasonerStateException { final PositiveLiteral 
factWithVariableTerms = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), Expressions.makeVariable("x")); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFacts(factWithVariableTerms); } } @@ -80,8 +84,9 @@ public void addFactsWithVariableTerms() throws ReasonerStateException { @Test(expected = IllegalArgumentException.class) public void addFactsWithBlankTerms() throws ReasonerStateException { final PositiveLiteral factWithBlankTerms = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), new BlankImpl("b")); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFacts(factWithBlankTerms); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java index 021ca1c78..b46298dce 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java @@ -77,7 +77,9 @@ public void testLoadTernaryFactsFromRdfFile() throws ReasonerStateException, Edb public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fileDataSource) throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(ternaryPredicate, fileDataSource); reasoner.load(); @@ -94,8 +96,9 @@ public void testLoadNonexistingRdfFile() final File nonexistingFile = new File("nonexistingFile.nt"); assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(ternaryPredicate, fileDataSource); reasoner.load(); } @@ -106,8 +109,9 @@ public void testLoadRdfInvalidFormat() throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { final FileDataSource fileDataSource = new RdfFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt")); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(ternaryPredicate, fileDataSource); reasoner.load(); FileDataSourceTestUtils.testNoFactsOverPredicate(reasoner, queryAtom); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java index 8e74622ab..64548b8dd 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java @@ -61,8 +61,9 @@ public void testSimpleSparqlQuery() // a has father b "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, @@ -86,8 +87,9 @@ public void testSimpleSparqlQueryHttps() // a has father b "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, @@ -119,8 +121,9 @@ public void testSimpleSparqlQuery2() // a has father b "?a wdt:P22 ?b ."); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, @@ -142,8 +145,9 @@ public void testConjunctiveQueryNewLineCharacterInQueryBody() // b has father a and b has mother c "?b wdt:P22 ?a .\n" + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(haveChildrenTogether, dataSource); reasoner.load(); reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, Expressions.makeVariable("x"), @@ -162,8 +166,9 @@ public void testConjunctiveQuery() // b has father a and b has mother c "?b wdt:P22 ?a ." + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFactsFromDataSource(haveChildrenTogether, dataSource); reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, @@ -184,8 +189,9 @@ public void testDataSourcePredicateDoesNotMatchSparqlQueryTerms() final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // b has father a and b has mother c "?b wdt:P22 ?a ." 
+ "?b wdt:P25 ?c"); + final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { // TODO must validate predicate arity sonner reasoner.addFactsFromDataSource(Expressions.makePredicate("ternary", 3), dataSource); reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 6df89d531..44651db01 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -32,6 +32,7 @@ import java.util.List; import java.util.Set; +import org.junit.Ignore; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -65,56 +66,69 @@ public class ReasonerStateTest { @Test(expected = NullPointerException.class) public void testSetAlgorithm() { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + try (final Reasoner reasoner = Reasoner.getInstance(kb);) { reasoner.setAlgorithm(null); } } @Test(expected = IllegalArgumentException.class) public void testSetReasoningTimeout() { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + try (final Reasoner reasoner = Reasoner.getInstance(kb);) { reasoner.setReasoningTimeout(-3); } } + // FIXME update test + @Ignore @Test(expected = ReasonerStateException.class) - public void testAddRules1() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + public void testAddRules1() + throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(ruleQxPx); + try (final Reasoner reasoner = Reasoner.getInstance(kb);) { reasoner.load(); - reasoner.addRules(ruleQxPx); } } @Test - public void testAddRules2() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + public void testAddRules2() + throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(ruleQxPx); + try (final Reasoner reasoner = Reasoner.getInstance(kb);) { reasoner.load(); reasoner.resetReasoner(); - reasoner.addRules(ruleQxPx); } } @Test(expected = IllegalArgumentException.class) - public void testAddRules3() throws EdbIdbSeparationException, IOException, ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { - final List rules = new ArrayList<>(); - rules.add(ruleQxPx); - rules.add(null); - reasoner.addRules(rules); - } + public void testAddRules3() { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + final List rules = new ArrayList<>(); + rules.add(ruleQxPx); + rules.add(null); + kb.addRules(rules); } @Test(expected = ReasonerStateException.class) - public void testAddFacts1() 
throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + public void testAddFacts1() + throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + try (final Reasoner reasoner = Reasoner.getInstance(kb);) { reasoner.load(); reasoner.addFacts(factPc); } } @Test(expected = IllegalArgumentException.class) - public void testAddFacts2() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + public void testAddFacts2() + throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final Reasoner reasoner = Reasoner.getInstance(kb);) { final List facts = new ArrayList<>(); facts.add(factPc); facts.add(null); @@ -125,29 +139,31 @@ public void testAddFacts2() throws EdbIdbSeparationException, IOException, Reaso @Test public void testResetBeforeLoad() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { reasoner.resetReasoner(); } } @Test(expected = NullPointerException.class) public void setRuleRewriteStrategy1() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl());) { reasoner.setRuleRewriteStrategy(null); } } @Test(expected = ReasonerStateException.class) - public void setRuleRewriteStrategy2() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + public void setRuleRewriteStrategy2() + throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl());) { reasoner.load(); reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); } } @Test - public void setRuleRewriteStrategy3() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + public void setRuleRewriteStrategy3() + throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl());) { reasoner.load(); reasoner.resetReasoner(); reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); @@ -155,12 +171,15 @@ public void setRuleRewriteStrategy3() throws ReasonerStateException, EdbIdbSepar } @Test - public void testResetDiscardInferences() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testResetDiscardInferences() + throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(ruleQxPx); + for (final Algorithm algorithm : Algorithm.values()) { // discard inferences regardless of the inference algorithm - try 
(final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + try (final Reasoner reasoner = Reasoner.getInstance(kb);) { reasoner.addFacts(factPc); - reasoner.addRules(ruleQxPx); reasoner.setAlgorithm(algorithm); reasoner.load(); @@ -189,8 +208,12 @@ public void testResetDiscardInferences() throws ReasonerStateException, EdbIdbSe } @Test - public void testResetKeepExplicitDatabase() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + public void testResetKeepExplicitDatabase() + throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(ruleQxPx); + + try (final Reasoner reasoner = Reasoner.getInstance(kb);) { // assert p(c) reasoner.addFacts(factPc); // assert r(d) @@ -198,7 +221,6 @@ public void testResetKeepExplicitDatabase() throws ReasonerStateException, EdbId reasoner.addFactsFromDataSource(predicateR1, new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER, "constantD.csv"))); // p(?x) -> q(?x) - reasoner.addRules(ruleQxPx); reasoner.load(); checkExplicitFacts(reasoner, predicateR1); @@ -233,8 +255,11 @@ private void checkExplicitFacts(final Reasoner reasoner, final Predicate predica } @Test - public void testResetEmptyKnowledgeBase() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase()); + public void testResetEmptyKnowledgeBase() + throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + final Reasoner reasoner = Reasoner.getInstance(kb); // 1. 
load and reason reasoner.load(); try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { @@ -267,28 +292,30 @@ public void testResetEmptyKnowledgeBase() throws EdbIdbSeparationException, IOEx @Test(expected = ReasonerStateException.class) public void testFailReasonBeforeLoad() throws ReasonerStateException, IOException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { reasoner.reason(); } } @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryBeforeLoad() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { reasoner.answerQuery(exampleQueryAtom, true); } } @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswerToCsvBeforeLoad() throws ReasonerStateException, IOException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { - reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { + reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", + true); } } @Test - public void testSuccessiveCloseAfterLoad() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + public void testSuccessiveCloseAfterLoad() + throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { reasoner.load(); reasoner.close(); reasoner.close(); @@ -297,7 +324,7 @@ public void testSuccessiveCloseAfterLoad() throws EdbIdbSeparationException, IOE @Test public void testSuccessiveCloseBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { reasoner.close(); reasoner.close(); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java index c95796f98..861ac3667 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java @@ -63,11 +63,11 @@ public class ReasonerTest { @Test public void testCloseRepeatedly() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBaseImpl())) { reasoner.close(); } - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBaseImpl())) { reasoner.load(); reasoner.close(); reasoner.close(); @@ -77,20 +77,23 @@ public void testCloseRepeatedly() @Test public void testLoadRules() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - try (final 
VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { - reasoner.addRules(ruleBxAx, ruleCxBx); - reasoner.addRules(ruleBxAx); - assertEquals(reasoner.getRules(), Arrays.asList(ruleBxAx, ruleCxBx, ruleBxAx)); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(ruleBxAx, ruleCxBx); + kb.addRules(ruleBxAx); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + assertEquals(kb.getRules(), Arrays.asList(ruleBxAx, ruleCxBx, ruleBxAx)); } } @Test public void testSimpleInference() throws EDBConfigurationException, IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(ruleBxAx, ruleCxBx); - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.addFacts(factAc, factAd); - reasoner.addRules(ruleBxAx, ruleCxBx); reasoner.load(); final QueryResultIterator cxQueryResultEnumBeforeReasoning = reasoner.answerQuery(atomCx, true); @@ -111,7 +114,7 @@ public void testSimpleInference() throws EDBConfigurationException, IOException, @Test public void testGenerateDataSourcesConfigEmpty() throws ReasonerStateException, IOException { - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBaseImpl())) { final String dataSourcesConfig = reasoner.generateDataSourcesConfig(); assertTrue(dataSourcesConfig.isEmpty()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java index 2e0362a38..dfd93bb26 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java @@ -60,9 +60,11 @@ public void testNotStratifiableEdbIdbSeparation() final Rule rule = makeRule(qXY, pXY, notQXY); final PositiveLiteral fact = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { - reasoner.addRules(rule); + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(fact); reasoner.load(); @@ -82,9 +84,11 @@ public void testNotStratifiable() final Rule rule = makeRule(qXY, pXY, notQXY); final PositiveLiteral fact = makePositiveLiteral("P", makeConstant("c"), makeConstant("d")); + + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { - reasoner.addRules(rule); + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(fact); reasoner.load(); @@ -112,8 +116,10 @@ public void testStratifiable() final PositiveLiteral qCD = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { - reasoner.addRules(rule); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(pCD, pEF, qCD); reasoner.load(); @@ -147,8 +153,10 @@ public void testInputNegation() final PositiveLiteral qCD = makePositiveLiteral("Q", makeConstant("c"), 
makeConstant("d")); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { - reasoner.addRules(rule); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(pCD, pEF, qCD); reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index f8d9d0fef..1569dfab7 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -38,6 +38,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -64,7 +65,34 @@ public static void main(final String[] args) final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + /* Load rules from DLGP file */ + try (final DlgpParser parser = new DlgpParser( + new File(ExamplesUtils.INPUT_FOLDER + "/graal", "doid-example.dlgp"))) { + while (parser.hasNext()) { + final Object object = parser.next(); + if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { + kb.addRules(GraalToVLog4JModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); + } + } + } + /* Create additional rules with negated literals */ + final Variable x = makeVariable("X"); + final Variable y = makeVariable("Y"); + final Variable z = makeVariable("Z"); + // humansWhoDiedOfNoncancer(X):-deathCause(X,Y),diseaseId(Y,Z),~cancerDisease(Z) + final NegativeLiteral notCancerDisease = Expressions.makeNegativeLiteral("cancerDisease", z); + final PositiveLiteral diseaseId = Expressions.makePositiveLiteral("diseaseId", y, z); + final PositiveLiteral deathCause = Expressions.makePositiveLiteral("deathCause", x, y); + final PositiveLiteral humansWhoDiedOfNoncancer = Expressions.makePositiveLiteral("humansWhoDiedOfNoncancer", x); + kb.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), + Expressions.makeConjunction(deathCause, diseaseId, notCancerDisease))); + // humansWhoDiedOfNoncancer(X) :- deathCause(X,Y), ~hasDoid(Y) + final NegativeLiteral hasNotDoid = Expressions.makeNegativeLiteral("hasDoid", y); + kb.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), + Expressions.makeConjunction(deathCause, hasNotDoid))); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { /* Configure RDF data source */ final Predicate doidTriplePredicate = makePredicate("doidTriple", 3); @@ -94,37 +122,8 @@ public static void main(final String[] args) final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); reasoner.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); - /* Load rules from DLGP file */ - try (final 
DlgpParser parser = new DlgpParser( - new File(ExamplesUtils.INPUT_FOLDER + "/graal", "doid-example.dlgp"))) { - while (parser.hasNext()) { - final Object object = parser.next(); - if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { - reasoner.addRules( - GraalToVLog4JModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); - } - } - } - - /* Create additional rules with negated literals */ - final Variable x = makeVariable("X"); - final Variable y = makeVariable("Y"); - final Variable z = makeVariable("Z"); - // humansWhoDiedOfNoncancer(X):-deathCause(X,Y),diseaseId(Y,Z),~cancerDisease(Z) - final NegativeLiteral notCancerDisease = Expressions.makeNegativeLiteral("cancerDisease", z); - final PositiveLiteral diseaseId = Expressions.makePositiveLiteral("diseaseId", y, z); - final PositiveLiteral deathCause = Expressions.makePositiveLiteral("deathCause", x, y); - final PositiveLiteral humansWhoDiedOfNoncancer = Expressions.makePositiveLiteral("humansWhoDiedOfNoncancer", - x); - reasoner.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), - Expressions.makeConjunction(deathCause, diseaseId, notCancerDisease))); - // humansWhoDiedOfNoncancer(X) :- deathCause(X,Y), ~hasDoid(Y) - final NegativeLiteral hasNotDoid = Expressions.makeNegativeLiteral("hasDoid", y); - reasoner.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), - Expressions.makeConjunction(deathCause, hasNotDoid))); - System.out.println("Rules configured:\n--"); - reasoner.getRules().forEach(System.out::println); + kb.getRules().forEach(System.out::println); System.out.println("--"); reasoner.load(); System.out.println("Loading completed."); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index cd38ee150..5978bcc99 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -44,6 +44,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.examples.ExamplesUtils; /** @@ -133,8 +134,10 @@ public static void main(final String[] args) * 2. Loading, reasoning, and querying while using try-with-resources to close * the reasoner automatically. */ - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { - reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { /* Importing {@code .csv} files as data sources. 
*/ final DataSource bicycleEDBDataSource = new CsvFileDataSource( diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index cb70270f0..075d92ad7 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -43,6 +43,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; @@ -133,8 +134,10 @@ public static void main(final String[] args) * 2. Loading, reasoning, querying and exporting, while using try-with-resources * to close the reasoner automatically. */ - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { - reasoner.addRules(rule1, rule2, rule3, rule4, rule5); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule1, rule2, rule3, rule4, rule5); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { /* Importing {@code .nt.gz} file as data source. */ final DataSource triplesEDBDataSource = new RdfFileDataSource( diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 59a6256c1..1300dcb5b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -39,6 +39,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -121,7 +122,9 @@ public static void main(final String[] args) final Predicate titleOfPublicationThatHasAuthorsWhoParentTheSameChild = Expressions .makePredicate("publicationAndAuthorsWhoParentTheSameChild", 3); - try (Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (Reasoner reasoner = Reasoner.getInstance(kb)) { /* * The SPARQL query results will be added to the reasoner knowledge base, as @@ -169,7 +172,8 @@ public static void main(final String[] args) * data added from the WikiData SPARQL query result. 
*/ reasoner.resetReasoner(); - reasoner.addRules(rule); + + kb.addRules(rule); reasoner.load(); reasoner.reason(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java index 1bf960fb4..98763f51f 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java @@ -42,6 +42,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; /** * This class exemplifies setting a log file and log level for VLog reasoner @@ -96,9 +97,11 @@ public class ConfigureReasonerLogging { public static void main(final String[] args) throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rules); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addRules(rules); reasoner.addFacts(fact); /* diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 6b0956ad4..15d844ff2 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -34,6 +34,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.examples.ExamplesUtils; /** @@ -122,9 +123,11 @@ public static void main(final String[] args) * 2. Loading, reasoning, and querying. Use try-with resources, or remember to * call close() to free the reasoner resources. 
*/ - try (Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); + + try (Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); reasoner.addFacts(fact1, fact2, fact3, fact4); reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java index df8a884dc..8cdd0a459 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java @@ -32,6 +32,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; @@ -104,12 +105,14 @@ public static void main(final String[] args) * 3. Loading, reasoning, and querying while using try-with-resources to close * the reasoner automatically. */ - try (Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { - reasoner.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); + for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { + kb.addRules(graalConjunctiveQueryToRule.getRule()); + } + + try (Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(GraalToVLog4JModelConverter.convertAtoms(graalAtoms)); - for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { - reasoner.addRules(graalConjunctiveQueryToRule.getRule()); - } reasoner.load(); System.out.println("Before materialisation:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index be80b72f8..033da03e0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -30,6 +30,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; @@ -124,9 +125,11 @@ public static void main(final String[] args) * 4. Loading, reasoning, and querying while using try-with-resources to close * the reasoner automatically. 
*/ - try (Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { - reasoner.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); - reasoner.addRules(convertedGraalConjunctiveQuery.getRule()); + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); + kb.addRules(convertedGraalConjunctiveQuery.getRule()); + + try (Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(GraalToVLog4JModelConverter.convertAtoms(graalAtoms)); reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 742317f17..c159d8ebc 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -40,6 +40,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; @@ -85,9 +86,11 @@ public static void main(final String[] args) throws OWLOntologyCreationException } System.out.println(); - try (Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + kb.addRules(new ArrayList<>(owlToRulesConverter.getRules())); + + try (Reasoner reasoner = Reasoner.getInstance(kb)) { /* Load rules and facts obtained from the ontology */ - reasoner.addRules(new ArrayList<>(owlToRulesConverter.getRules())); reasoner.addFacts(owlToRulesConverter.getFacts()); reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index e72533712..207c4d160 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -49,6 +49,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.rdf.RdfModelConverter; @@ -146,18 +147,20 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti final Rule organizationRule = Expressions.makeRule(creatorOrganizationName, personHasAffiliation, affiliationWithOrganization, organizationHasName); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase());) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + /* + * The rule that maps people to their organization name based on facts extracted + * from RDF 
triples is added to the Reasoner's knowledge base. + */ + kb.addRules(organizationRule); + + try (final Reasoner reasoner = Reasoner.getInstance(kb);) { /* * Facts extracted from the RDF resources are added to the Reasoner's knowledge * base. */ reasoner.addFacts(tripleFactsISWC2016); reasoner.addFacts(tripleFactsISWC2017); - /* - * The rule that maps people to their organization name based on facts extracted - * from RDF triples is added to the Reasoner's knowledge base. - */ - reasoner.addRules(organizationRule); reasoner.load(); reasoner.reason(); diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index 57665eeab..fd91e4032 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -49,6 +49,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; public class TestReasonOverRdfFacts { @@ -70,7 +71,9 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToPositiveLiterals(model); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(facts); reasoner.load(); @@ -88,7 +91,9 @@ public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandle RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToPositiveLiterals(model); - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBase())) { + final KnowledgeBase kb = new KnowledgeBaseImpl(); + + try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(facts); reasoner.load(); From 299bf4eef428a372e88fa60270d8b4d29b7bea4f Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Wed, 8 May 2019 16:46:17 +0200 Subject: [PATCH 0010/1003] moved adding facts to KnowledgeBase --- .../vlog4j/core/reasoner/KnowledgeBase.java | 67 +++++++ .../vlog4j/core/reasoner/Reasoner.java | 85 ++------ .../implementation/KnowledgeBaseImpl.java | 56 ------ .../implementation/VLogKnowledgeBase.java | 184 ++++++++++++++++++ .../reasoner/implementation/VLogReasoner.java | 152 +++------------ .../vlog4j/core/reasoner/LoggingTest.java | 114 +++++------ .../core/reasoner/ReasonerTimeoutTest.java | 73 ++++--- .../implementation/AddDataSourceTest.java | 84 ++++---- .../implementation/AnswerQueryTest.java | 32 +-- .../ExportQueryAnswersToCsvFileTest.java | 12 +- .../FileDataSourceTestUtils.java | 7 +- .../GeneratedAnonymousIndividualsTest.java | 60 +++--- .../LoadDataFromCsvFileTest.java | 22 +-- .../LoadDataFromMemoryTest.java | 45 +++-- .../LoadDataFromRdfFileTest.java | 20 +- .../LoadDataFromSparqlQueryTest.java | 39 ++-- .../implementation/ReasonerStateTest.java | 140 ++++++------- .../reasoner/implementation/ReasonerTest.java | 19 +- .../StratifiedNegationTest.java | 56 +++--- .../vlog4j/examples/DoidExample.java | 66 +++---- .../examples/core/AddDataFromCsvFile.java | 23 +-- 
.../examples/core/AddDataFromRdfFile.java | 18 +- .../core/AddDataFromSparqlQueryResults.java | 8 +- .../core/ConfigureReasonerLogging.java | 10 +- .../SkolemVsRestrictedChaseTermination.java | 11 +- .../examples/graal/AddDataFromDlgpFile.java | 15 +- .../examples/graal/AddDataFromGraal.java | 12 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 11 +- .../examples/rdf/AddDataFromRdfModel.java | 22 +-- .../vlog4j/rdf/TestReasonOverRdfFacts.java | 6 +- 30 files changed, 729 insertions(+), 740 deletions(-) delete mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/KnowledgeBaseImpl.java create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 981ecf4f7..a3dc29f52 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -1,12 +1,17 @@ package org.semanticweb.vlog4j.core.reasoner; +import java.util.Collection; import java.util.List; import java.util.Observable; import org.eclipse.jdt.annotation.NonNull; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.TermType; +import karmaresearch.vlog.Atom; + /*- * #%L * VLog4j Core Components @@ -64,5 +69,67 @@ public abstract class KnowledgeBase extends Observable { * @return list of {@link Rule} */ public abstract List getRules(); + + /** + * Adds non-null facts to the knowledge base. A fact is a + * {@link PositiveLiteral} with all terms ({@link PositiveLiteral#getTerms()}) + * of type {@link TermType#CONSTANT}.
+ * Facts predicates ({@link PositiveLiteral#getPredicate()}) cannot have + * multiple data sources. + * + * @param facts facts to be added to the knowledge base. The given order + * is not maintained. + * @throws IllegalArgumentException if the knowledge base contains facts + * from a data source with the same predicate + * ({@link PositiveLiteral#getPredicate()}) as + * a {@link PositiveLiteral} among given + * {@code facts}. + * @throws IllegalArgumentException if the {@code facts} literals contain terms + * which are not of type + * {@link TermType#CONSTANT}. + */ + // TODO add examples to javadoc about multiple sources per predicate and EDB/IDB + public abstract void addFacts(@NonNull PositiveLiteral... facts); + + /** + * Adds non-null facts to the knowledge base. A fact is a + * {@link PositiveLiteral} with all terms ({@link PositiveLiteral#getTerms()}) + * of type {@link TermType#CONSTANT}.
+ * Facts predicates ({@link PositiveLiteral#getPredicate()}) cannot have + * multiple data sources. + * + * @param facts facts to be added to the knowledge base. + * @throws IllegalArgumentException if the knowledge base contains facts + * from a data source with the same predicate + * ({@link PositiveLiteral#getPredicate()}) as + * a {@link PositiveLiteral} among given + * {@code facts}. + * @throws IllegalArgumentException if the {@code facts} literals contain terms + * which are not of type + * {@link TermType#CONSTANT}. + */ + // TODO add examples to javadoc about multiple sources per predicate and EDB/IDB + public abstract void addFacts(@NonNull Collection facts); + + /** + * Adds facts stored in given {@code dataSource} for given {@code predicate} to + * the knowledge base. Facts predicates cannot have multiple + * data sources, including in-memory {@link Atom} objects added through + * {@link #addFacts}. + * + * @param predicate the {@link Predicate} for which the given + * {@code dataSource} contains fact terms. + * @param dataSource data source containing the fact terms to be associated to + * given predicate and added to the reasoner + * @throws IllegalArgumentException if the knowledge base contains facts + * in memory (added using {@link #addFacts}) or + * from a data source with the same + * {@link Predicate} as given + * {@code predicate}. + */ + // TODO add example to javadoc with two datasources and with in-memory facts for + // the same predicate. + // TODO validate predicate arity corresponds to the dataSource facts arity + public abstract void addFactsFromDataSource(@NonNull Predicate predicate, @NonNull DataSource dataSource); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 0c81868a9..63e11fa7e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -1,7 +1,6 @@ package org.semanticweb.vlog4j.core.reasoner; import java.io.IOException; -import java.util.Collection; import java.util.Observer; import org.eclipse.jdt.annotation.NonNull; @@ -15,6 +14,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import karmaresearch.vlog.Atom; @@ -93,14 +93,20 @@ public interface Reasoner extends AutoCloseable, Observer { /** - * Factory method that to instantiate a Reasoner with given knowledge base. - * @param knowledgeBase Knowledge Base containing data associated to the reasoner + * Factory method to instantiate a Reasoner with an empty knowledge base. * * @return a {@link VLogReasoner} instance. */ - public static Reasoner getInstance(KnowledgeBase knowledgeBase) { + public static Reasoner getInstance() { + final VLogKnowledgeBase knowledgeBase = new VLogKnowledgeBase(); return new VLogReasoner(knowledgeBase); } + + /** + * Getter for the knowledge base to reason on. + * @return the reasoner's knowledge base + */ + KnowledgeBase getKnowledgeBase(); /** * Sets the algorithm that will be used for reasoning over the knowledge base.
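For orientation, the following is a minimal sketch of the client-side workflow this interface change points to, pieced together from the signatures visible in this patch (the parameterless Reasoner.getInstance(), the new getKnowledgeBase() accessor, and the KnowledgeBase methods addRules and addFacts). It is an illustration only, not code from the repository: the Expressions package name and the use of a blanket throws Exception are assumptions, and it presumes VLogReasoner implements the new accessor as the interface suggests.

```java
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;
import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator;

public class KnowledgeBaseWorkflowSketch {

	public static void main(final String[] args) throws Exception {
		// fact p(c) and rule q(?x) :- p(?x)
		final PositiveLiteral factPc = Expressions.makePositiveLiteral("p", Expressions.makeConstant("c"));
		final Rule ruleQxPx = Expressions.makeRule(
				Expressions.makePositiveLiteral("q", Expressions.makeVariable("x")),
				Expressions.makePositiveLiteral("p", Expressions.makeVariable("x")));

		// after this patch the reasoner owns an (initially empty) knowledge base ...
		try (final Reasoner reasoner = Reasoner.getInstance()) {
			final KnowledgeBase kb = reasoner.getKnowledgeBase();
			// ... and rules and facts are added to the knowledge base, not to the reasoner
			kb.addRules(ruleQxPx);
			kb.addFacts(factPc);

			reasoner.load();
			reasoner.reason();

			// query q(?x); expected answer: q(c)
			try (final QueryResultIterator answers = reasoner
					.answerQuery(Expressions.makePositiveLiteral("q", Expressions.makeVariable("x")), true)) {
				while (answers.hasNext()) {
					System.out.println(answers.next());
				}
			}
		}
	}
}
```

The design direction is that the knowledge base becomes the single owner of rules and facts, while the reasoner only loads, materialises and queries it; the updated tests and examples in the rest of this patch follow the same pattern.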
@@ -198,76 +204,7 @@ public static Reasoner getInstance(KnowledgeBase knowledgeBase) { void setLogFile(@Nullable String filePath) throws ReasonerStateException; - /** - * Adds non-null facts to the reasoner knowledge base. A fact is a - * {@link PositiveLiteral} with all terms ({@link PositiveLiteral#getTerms()}) - * of type {@link TermType#CONSTANT}.
- * Facts can only be added before loading ({@link #load()}).
- * Facts predicates ({@link PositiveLiteral#getPredicate()}) cannot have - * multiple data sources. - * - * @param facts facts to be added to the knowledge base. The given order - * is not maintained. - * @throws ReasonerStateException if the reasoner has already been loaded - * ({@link #load()}). - * @throws IllegalArgumentException if the knowledge base contains facts - * from a data source with the same predicate - * ({@link PositiveLiteral#getPredicate()}) as - * a {@link PositiveLiteral} among given - * {@code facts}. - * @throws IllegalArgumentException if the {@code facts} literals contain terms - * which are not of type - * {@link TermType#CONSTANT}. - */ - // TODO add examples to javadoc about multiple sources per predicate and EDB/IDB - void addFacts(@NonNull PositiveLiteral... facts) throws ReasonerStateException; - - /** - * Adds non-null facts to the reasoner knowledge base. A fact is a - * {@link PositiveLiteral} with all terms ({@link PositiveLiteral#getTerms()}) - * of type {@link TermType#CONSTANT}.
- * Facts can only be added before loading ({@link #load()}).
- * Facts predicates ({@link PositiveLiteral#getPredicate()}) cannot have - * multiple data sources. - * - * @param facts facts to be added to the knowledge base. - * @throws ReasonerStateException if the reasoner has already been loaded - * ({@link #load()}). - * @throws IllegalArgumentException if the knowledge base contains facts - * from a data source with the same predicate - * ({@link PositiveLiteral#getPredicate()}) as - * an {@link PositiveLiteral} among given - * {@code facts}. - * @throws IllegalArgumentException if the {@code facts} literals contain terms - * which are not of type - * {@link TermType#CONSTANT}. - */ - // TODO add examples to javadoc about multiple sources per predicate and EDB/IDB - void addFacts(@NonNull Collection facts) throws ReasonerStateException; - - /** - * Adds facts stored in given {@code dataSource} for given {@code predicate} to - * the reasoner knowledge base. Facts predicates cannot have multiple - * data sources, including in-memory {@link Atom} objects added trough - * {@link #addFacts}. - * - * @param predicate the {@link Predicate} for which the given - * {@code dataSource} contains fact terms. - * @param dataSource data source containing the fact terms to be associated to - * given predicate and added to the reasoner - * @throws ReasonerStateException if the reasoner has already been loaded - * ({@link #load()}). - * @throws IllegalArgumentException if the knowledge base contains facts - * in memory (added using {@link #addFacts}) or - * from a data source with the same - * {@link Predicate} as given - * {@code predicate}. - */ - // TODO add example to javadoc with two datasources and with in-memory facts for - // the same predicate. - // TODO validate predicate arity corresponds to the dataSource facts arity - void addFactsFromDataSource(@NonNull Predicate predicate, @NonNull DataSource dataSource) - throws ReasonerStateException; + /** * Loads the knowledge base, consisting of the current rules and facts, diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/KnowledgeBaseImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/KnowledgeBaseImpl.java deleted file mode 100644 index 93dd8477b..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/KnowledgeBaseImpl.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; - -public class KnowledgeBaseImpl extends KnowledgeBase { - - private final List rules = new ArrayList<>(); - - @Override - public void addRules(final Rule... rules) { - addRules(Arrays.asList(rules)); - } - - @Override - public void addRules(final List rules) { - Validate.noNullElements(rules, "Null rules are not alowed! The list contains a null at position [%d]."); - this.rules.addAll(new ArrayList<>(rules)); - - - // TODO setChanged - // TODO notify listeners with the diff - } - - @Override - public List getRules() { - return Collections.unmodifiableList(this.rules); - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java new file mode 100644 index 000000000..ea95b24c6 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -0,0 +1,184 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; + +public class VLogKnowledgeBase extends KnowledgeBase { + + private final List rules = new ArrayList<>(); + private final Map> factsForPredicate = new HashMap<>(); + private final Map dataSourceForPredicate = new HashMap<>(); + + + + @Override + public void addRules(final Rule... rules) { + addRules(Arrays.asList(rules)); + } + + @Override + public void addRules(final List rules) { + Validate.noNullElements(rules, "Null rules are not alowed! 
The list contains a null at position [%d]."); + this.rules.addAll(new ArrayList<>(rules)); + + // TODO setChanged + // TODO notify listeners with the diff + } + + @Override + public List getRules() { + return Collections.unmodifiableList(this.rules); + } + + @Override + public void addFacts(final PositiveLiteral... facts) { + addFacts(Arrays.asList(facts)); + + // TODO setChanged + // TODO notify listeners with the diff + } + + @Override + public void addFacts(final Collection facts) { + Validate.noNullElements(facts, "Null facts are not alowed! The list contains a fact at position [%d]."); + for (final PositiveLiteral fact : facts) { + validateFactTermsAreConstant(fact); + + final Predicate predicate = fact.getPredicate(); + validateNoDataSourceForPredicate(predicate); + + this.factsForPredicate.putIfAbsent(predicate, new HashSet<>()); + this.factsForPredicate.get(predicate).add(fact); + } + } + + @Override + public void addFactsFromDataSource(final Predicate predicate, final DataSource dataSource) { + Validate.notNull(predicate, "Null predicates are not allowed!"); + Validate.notNull(dataSource, "Null dataSources are not allowed!"); + validateNoDataSourceForPredicate(predicate); + Validate.isTrue(!this.factsForPredicate.containsKey(predicate), + "Multiple data sources for the same predicate are not allowed! Facts for predicate [%s] alredy added in memory: %s", + predicate, this.factsForPredicate.get(predicate)); + + this.dataSourceForPredicate.put(predicate, dataSource); + } + + boolean hasFacts() { + return !this.dataSourceForPredicate.isEmpty() || !this.factsForPredicate.isEmpty(); + } + + Map getDataSourceForPredicate() { + return this.dataSourceForPredicate; + } + + Map> getFactsForPredicate() { + return this.factsForPredicate; + } + + Set getEdbPredicates() { + // TODO use chache + return collectEdbPredicates(); + } + + Set getIdbPredicates() { + // TODO use chache + return collectIdbPredicates(); + } + + String generateDataSourcesConfig() { + final StringBuilder configStringBuilder = new StringBuilder(); + int dataSourceIndex = 0; + for (final Predicate predicate : this.dataSourceForPredicate.keySet()) { + final DataSource dataSource = this.dataSourceForPredicate.get(predicate); + try (final Formatter formatter = new Formatter(configStringBuilder);) { + formatter.format(dataSource.toConfigString(), dataSourceIndex, + ModelToVLogConverter.toVLogPredicate(predicate)); + } + dataSourceIndex++; + } + return configStringBuilder.toString(); + } + + void validateEdbIdbSeparation() throws EdbIdbSeparationException { + final Set edbPredicates = getEdbPredicates(); + final Set idbPredicates = getIdbPredicates(); + final Set intersection = new HashSet<>(edbPredicates); + intersection.retainAll(idbPredicates); + if (!intersection.isEmpty()) { + throw new EdbIdbSeparationException(intersection); + } + } + + + private void validateFactTermsAreConstant(PositiveLiteral fact) { + final Set nonConstantTerms = new HashSet<>(fact.getTerms()); + nonConstantTerms.removeAll(fact.getConstants()); + Validate.isTrue(nonConstantTerms.isEmpty(), + "Only Constant terms alowed in Fact literals! The following non-constant terms [%s] appear for fact [%s]!", + nonConstantTerms, fact); + + } + + private void validateNoDataSourceForPredicate(final Predicate predicate) { + Validate.isTrue(!this.dataSourceForPredicate.containsKey(predicate), + "Multiple data sources for the same predicate are not allowed! 
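The generic type arguments of the new class appear to have been lost in this rendering of the patch (angle brackets read as markup). The following compilable skeleton reconstructs the types from how the fields and accessors are used; method bodies are abbreviated, validation is omitted, and the class name marks it as a reconstruction rather than the original file:

```
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Predicate;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.reasoner.DataSource;

public class VLogKnowledgeBaseSkeleton {

    // Reconstructed field types; rules, in-memory facts grouped by predicate,
    // and one external data source per predicate.
    private final List<Rule> rules = new ArrayList<>();
    private final Map<Predicate, Set<PositiveLiteral>> factsForPredicate = new HashMap<>();
    private final Map<Predicate, DataSource> dataSourceForPredicate = new HashMap<>();

    // Facts are grouped under their predicate (validation omitted here).
    public void addFacts(final Collection<PositiveLiteral> facts) {
        for (final PositiveLiteral fact : facts) {
            this.factsForPredicate.putIfAbsent(fact.getPredicate(), new HashSet<>());
            this.factsForPredicate.get(fact.getPredicate()).add(fact);
        }
    }

    // EDB predicates are those with explicit facts or an attached data source;
    // the remaining accessors in the patch follow the same typing pattern.
    Set<Predicate> getEdbPredicates() {
        final Set<Predicate> edbPredicates = new HashSet<>(this.dataSourceForPredicate.keySet());
        edbPredicates.addAll(this.factsForPredicate.keySet());
        return edbPredicates;
    }
}
```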
Facts for predicate [%s] alredy added from data source: %s", + predicate, this.dataSourceForPredicate.get(predicate)); + } + + private Set collectEdbPredicates() { + final Set edbPredicates = new HashSet<>(); + edbPredicates.addAll(this.dataSourceForPredicate.keySet()); + edbPredicates.addAll(this.factsForPredicate.keySet()); + return edbPredicates; + } + + private Set collectIdbPredicates() { + final Set idbPredicates = new HashSet<>(); + for (final Rule rule : this.rules) { + for (final Literal headAtom : rule.getHead()) { + idbPredicates.add(headAtom.getPredicate()); + } + } + return idbPredicates; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 608c5b4be..beb960ab5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,21 +1,13 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Formatter; -import java.util.HashMap; -import java.util.HashSet; import java.util.Map; import java.util.Observable; import java.util.Set; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; @@ -63,7 +55,7 @@ public class VLogReasoner implements Reasoner { private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); - private final KnowledgeBase knowledgeBase; + private final VLogKnowledgeBase knowledgeBase; private final VLog vLog = new VLog(); private ReasonerState reasonerState = ReasonerState.BEFORE_LOADING; @@ -73,20 +65,22 @@ public class VLogReasoner implements Reasoner { private Integer timeoutAfterSeconds; private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; - private final Map> factsForPredicate = new HashMap<>(); - private final Map dataSourceForPredicate = new HashMap<>(); + /** + * Holds the state of the reasoning result. Has value {@code true} if reasoning + * has completed, {@code false} if it has been interrupted. + */ + private boolean reasoningCompleted; - public VLogReasoner(KnowledgeBase knowledgeBase) { + public VLogReasoner(VLogKnowledgeBase knowledgeBase) { super(); this.knowledgeBase = knowledgeBase; this.knowledgeBase.addObserver(this); } - /** - * Holds the state of the reasoning result. Has value {@code true} if reasoning - * has completed, {@code false} if it has been interrupted. - */ - private boolean reasoningCompleted; + @Override + public KnowledgeBase getKnowledgeBase() { + return knowledgeBase; + } @Override public void setAlgorithm(final Algorithm algorithm) { @@ -132,65 +126,6 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { return this.ruleRewriteStrategy; } - @Override - public void addFacts(final PositiveLiteral... 
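validateEdbIdbSeparation and the two collect methods above implement the EDB/IDB separation check: a predicate may either be given facts (EDB) or be derived through rule heads (IDB), not both. A minimal sketch of a violation, mirroring the loadEdbIdbNotSeparated test further down in this patch; the class name is a placeholder:

```
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.model.api.Variable;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class EdbIdbSeparationSketch {
    public static void main(String[] args) throws Exception {
        final Variable x = Expressions.makeVariable("x");
        // "q" is an IDB predicate: it occurs in a rule head ...
        final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", x),
                Expressions.makePositiveLiteral("p", x));
        // ... and also an EDB predicate: it is given an explicit fact.
        final PositiveLiteral qFact = Expressions.makePositiveLiteral("q", Expressions.makeConstant("c"));

        final VLogKnowledgeBase kb = new VLogKnowledgeBase();
        kb.addRules(rule);
        kb.addFacts(qFact);

        try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.load(); // fails: the predicate q is both EDB and IDB
        } catch (final EdbIdbSeparationException e) {
            System.out.println("EDB/IDB separation violated: " + e.getMessage());
        }
    }
}
```

Moving the fact to a predicate that does not occur in any rule head makes the same knowledge base load without error.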
facts) throws ReasonerStateException { - addFacts(Arrays.asList(facts)); - } - - @Override - public void addFacts(final Collection facts) throws ReasonerStateException { - if (this.reasonerState != ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, - "Facts cannot be added after the reasoner has been loaded! Call reset() to undo loading and reasoning."); - } - Validate.noNullElements(facts, "Null facts are not alowed! The list contains a fact at position [%d]."); - for (final PositiveLiteral fact : facts) { - validateFactTermsAreConstant(fact); - - final Predicate predicate = fact.getPredicate(); - validateNoDataSourceForPredicate(predicate); - - this.factsForPredicate.putIfAbsent(predicate, new HashSet<>()); - this.factsForPredicate.get(predicate).add(fact); - } - if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) - LOGGER.warn("Adding facts to a closed reasoner."); - } - - @Override - public void addFactsFromDataSource(final Predicate predicate, final DataSource dataSource) - throws ReasonerStateException { - if (this.reasonerState != ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, - "Data sources cannot be added after the reasoner has been loaded! Call reset() to undo loading and reasoning."); - } - Validate.notNull(predicate, "Null predicates are not allowed!"); - Validate.notNull(dataSource, "Null dataSources are not allowed!"); - validateNoDataSourceForPredicate(predicate); - Validate.isTrue(!this.factsForPredicate.containsKey(predicate), - "Multiple data sources for the same predicate are not allowed! Facts for predicate [%s] alredy added in memory: %s", - predicate, this.factsForPredicate.get(predicate)); - - this.dataSourceForPredicate.put(predicate, dataSource); - if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) - LOGGER.warn("Adding facts to a closed reasoner."); - } - - private void validateFactTermsAreConstant(PositiveLiteral fact) { - final Set nonConstantTerms = new HashSet<>(fact.getTerms()); - nonConstantTerms.removeAll(fact.getConstants()); - Validate.isTrue(nonConstantTerms.isEmpty(), - "Only Constant terms alowed in Fact literals! The following non-constant terms [%s] appear for fact [%s]!", - nonConstantTerms, fact); - - } - - private void validateNoDataSourceForPredicate(final Predicate predicate) { - Validate.isTrue(!this.dataSourceForPredicate.containsKey(predicate), - "Multiple data sources for the same predicate are not allowed! 
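The reasoner methods removed here were the old entry points for facts and data sources; after this patch the equivalent calls go through the knowledge base, and the reasoner is constructed on top of it. A hedged before/after sketch of client code (class name is a placeholder, the "before" lines no longer compile and are kept as comments):

```
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class MigrationSketch {
    public static void main(String[] args) throws Exception {
        // Before this patch:
        //     reasoner.addFacts(Expressions.makePositiveLiteral("p", Expressions.makeConstant("c")));
        //     reasoner.load();

        // After this patch: facts belong to the knowledge base,
        // the reasoner only loads and reasons.
        final VLogKnowledgeBase kb = new VLogKnowledgeBase();
        kb.addFacts(Expressions.makePositiveLiteral("p", Expressions.makeConstant("c")));

        try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.load();
            reasoner.reason();
        }
    }
}
```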
Facts for predicate [%s] alredy added from data source: %s", - predicate, this.dataSourceForPredicate.get(predicate)); - } - @Override public void load() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { @@ -199,16 +134,16 @@ public void load() if (this.reasonerState != ReasonerState.BEFORE_LOADING) { LOGGER.warn("This method call is ineffective: the Reasoner has already been loaded."); } else { - validateEdbIdbSeparation(); + this.knowledgeBase.validateEdbIdbSeparation(); this.reasonerState = ReasonerState.AFTER_LOADING; - if (this.dataSourceForPredicate.isEmpty() && this.factsForPredicate.isEmpty()) { + if (!this.knowledgeBase.hasFacts()) { LOGGER.warn("No facts have been provided."); } try { - this.vLog.start(generateDataSourcesConfig(), false); + this.vLog.start(this.knowledgeBase.generateDataSourcesConfig(), false); } catch (final AlreadyStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final EDBConfigurationException e) { @@ -230,7 +165,8 @@ public void load() } private void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { - for (final Predicate predicate : this.dataSourceForPredicate.keySet()) { + final Map dataSourceForPredicate = this.knowledgeBase.getDataSourceForPredicate(); + for (final Predicate predicate : dataSourceForPredicate.keySet()) { final int dataSourcePredicateArity; try { dataSourcePredicateArity = this.vLog.getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); @@ -238,11 +174,11 @@ private void validateDataSourcePredicateArities() throws IncompatiblePredicateAr throw new RuntimeException("Inconsistent reasoner state.", e); } if (dataSourcePredicateArity == -1) { - LOGGER.warn("Data source {} for predicate {} is empty: ", this.dataSourceForPredicate.get(predicate), + LOGGER.warn("Data source {} for predicate {} is empty: ", dataSourceForPredicate.get(predicate), predicate); } else if (predicate.getArity() != dataSourcePredicateArity) { throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, - this.dataSourceForPredicate.get(predicate)); + dataSourceForPredicate.get(predicate)); } } @@ -339,54 +275,13 @@ public void close() { this.vLog.stop(); } - private void validateEdbIdbSeparation() throws EdbIdbSeparationException { - final Set edbPredicates = collectEdbPredicates(); - final Set idbPredicates = collectIdbPredicates(); - final Set intersection = new HashSet<>(edbPredicates); - intersection.retainAll(idbPredicates); - - if (!intersection.isEmpty()) { - throw new EdbIdbSeparationException(intersection); - } - } - - private Set collectEdbPredicates() { - final Set edbPredicates = new HashSet<>(); - edbPredicates.addAll(this.dataSourceForPredicate.keySet()); - edbPredicates.addAll(this.factsForPredicate.keySet()); - return edbPredicates; - } - - private Set collectIdbPredicates() { - final Set idbPredicates = new HashSet<>(); - for (final Rule rule : this.knowledgeBase.getRules()) { - for (final Literal headAtom : rule.getHead()) { - idbPredicates.add(headAtom.getPredicate()); - } - } - return idbPredicates; - } - - String generateDataSourcesConfig() { - final StringBuilder configStringBuilder = new StringBuilder(); - int dataSourceIndex = 0; - for (final Predicate predicate : this.dataSourceForPredicate.keySet()) { - final DataSource dataSource = this.dataSourceForPredicate.get(predicate); - try (final Formatter formatter = new Formatter(configStringBuilder);) { - 
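validateDataSourcePredicateArities compares each declared predicate arity with the arity VLog reports for the attached data source and aborts loading on a mismatch. A minimal sketch, assuming a CSV file with two columns bound to a unary predicate; the file name, class name and import location of CsvFileDataSource (taken from the package of the tests below) are assumptions. The same situation is exercised by testLoadCsvFileWrongArity later in this patch:

```
import java.io.File;

import org.semanticweb.vlog4j.core.model.api.Predicate;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException;
import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class ArityMismatchSketch {
    public static void main(String[] args) throws Exception {
        // Unary predicate, but the CSV file is assumed to contain two columns per line.
        final Predicate p = Expressions.makePredicate("p", 1);

        final VLogKnowledgeBase kb = new VLogKnowledgeBase();
        kb.addFactsFromDataSource(p, new CsvFileDataSource(new File("binaryFacts.csv")));

        try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.load(); // fails: declared arity 1 vs. arity 2 found in the data source
        } catch (final IncompatiblePredicateArityException e) {
            System.out.println("Arity mismatch: " + e.getMessage());
        }
    }
}
```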
formatter.format(dataSource.toConfigString(), dataSourceIndex, - ModelToVLogConverter.toVLogPredicate(predicate)); - } - dataSourceIndex++; - } - return configStringBuilder.toString(); - } - private void loadInMemoryFacts() { - for (final Predicate predicate : this.factsForPredicate.keySet()) { - final Set factsForPredicate = this.factsForPredicate.get(predicate); + final Map> factsForPredicate = this.knowledgeBase.getFactsForPredicate(); + for (final Predicate predicate : factsForPredicate.keySet()) { + final Set facts = factsForPredicate.get(predicate); final String vLogPredicate = ModelToVLogConverter.toVLogPredicate(predicate); - final String[][] tuplesForPredicate = ModelToVLogConverter.toVLogFactTuples(factsForPredicate); + final String[][] tuplesForPredicate = ModelToVLogConverter.toVLogFactTuples(facts); try { this.vLog.addData(vLogPredicate, tuplesForPredicate); } catch (final EDBConfigurationException e) { @@ -396,7 +291,8 @@ private void loadInMemoryFacts() { } private void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(this.knowledgeBase.getRules()); + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter + .toVLogRuleArray(this.knowledgeBase.getRules()); final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); try { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 6b76d1fe7..603ab18fb 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -39,7 +39,8 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; public class LoggingTest { @@ -54,40 +55,42 @@ public class LoggingTest { private static final Constant constantC = Expressions.makeConstant("c"); private static final PositiveLiteral factPc = Expressions.makePositiveLiteral("p", constantC); + private static final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + + static { + kb.addRules(rule); + kb.addFacts(factPc); + } + // TODO remaining tests: change log file // TODO remaining tests: test that the log level and the log files can be set // any time @Test - public void testSetLogFileNull() throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rule); - - try (final Reasoner instance = Reasoner.getInstance(kb)) { - instance.setLogFile(null); - instance.setLogLevel(LogLevel.INFO); - - instance.addFacts(factPc); - instance.load(); - instance.reason(); + public void testSetLogFileNull() + throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(null); + reasoner.setLogLevel(LogLevel.INFO); + + reasoner.load(); + reasoner.reason(); } // TODO 
test that logging is redirected to system output } @Test - public void testSetLogFileInexistent() throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testSetLogFileInexistent() + throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { final String inexistentFilePath = LOGS_FOLDER + "a/b"; - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rule); - try (final Reasoner instance = Reasoner.getInstance(kb)) { - instance.setLogFile(inexistentFilePath); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(inexistentFilePath); assertFalse(new File(inexistentFilePath).exists()); - instance.setLogLevel(LogLevel.INFO); + reasoner.setLogLevel(LogLevel.INFO); - instance.addFacts(factPc); - instance.load(); - instance.reason(); + reasoner.load(); + reasoner.reason(); } // TODO test that logging is redirected to system output assertFalse(new File(inexistentFilePath).exists()); @@ -95,33 +98,30 @@ public void testSetLogFileInexistent() throws ReasonerStateException, IOExceptio @Test(expected = NullPointerException.class) public void testSetLogLevelNull() throws ReasonerStateException { - try (final Reasoner instance = Reasoner.getInstance(new KnowledgeBaseImpl())) { + try (final Reasoner instance = Reasoner.getInstance()) { instance.setLogLevel(null); } } @Test - public void testSetLogFileAppendsToFile() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testSetLogFileAppendsToFile() + throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testSetLogFileAppendsToFile.log"; assertFalse(new File(logFilePath).exists()); int countLinesBeforeReset = 0; - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rule); - - try (final Reasoner instance = Reasoner.getInstance(kb)) { - instance.addFacts(factPc); - instance.setLogLevel(LogLevel.INFO); - instance.setLogFile(logFilePath); - instance.load(); - instance.reason(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogLevel(LogLevel.INFO); + reasoner.setLogFile(logFilePath); + reasoner.load(); + reasoner.reason(); countLinesBeforeReset = readFile(logFilePath); assertTrue(countLinesBeforeReset > 0); - instance.resetReasoner(); - instance.load(); - instance.reason(); + reasoner.resetReasoner(); + reasoner.load(); + reasoner.reason(); } final int countLinesAfterReset = readFile(logFilePath); // the logger appends to the same file after reset @@ -130,22 +130,19 @@ public void testSetLogFileAppendsToFile() throws EdbIdbSeparationException, IOEx } @Test - public void testLogLevelInfo() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testLogLevelInfo() + throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelInfo.log"; assertFalse(new File(logFilePath).exists()); - - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rule); - try (final Reasoner instance = Reasoner.getInstance(kb)) { - instance.addFacts(factPc); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - instance.setLogLevel(LogLevel.INFO); - instance.setLogFile(logFilePath); - 
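The rewritten logging tests boil down to the following usage pattern; a minimal sketch with a placeholder log file name, assuming LogLevel lives in the core.reasoner package (inferred from the test's own package, since its import is not shown):

```
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.LogLevel;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class LoggingSketch {
    public static void main(String[] args) throws Exception {
        final VLogKnowledgeBase kb = new VLogKnowledgeBase();
        kb.addFacts(Expressions.makePositiveLiteral("p", Expressions.makeConstant("c")));

        try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.setLogLevel(LogLevel.DEBUG);
            // A null file is expected to fall back to the default output (cf. testSetLogFileNull).
            reasoner.setLogFile("vlog.log");
            reasoner.load();
            reasoner.reason();

            // After a reset, reasoning again appends to the same log file
            // (cf. testSetLogFileAppendsToFile).
            reasoner.resetReasoner();
            reasoner.load();
            reasoner.reason();
        }
    }
}
```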
instance.load(); - instance.setLogLevel(LogLevel.INFO); - instance.reason(); - instance.setLogLevel(LogLevel.INFO); + reasoner.setLogLevel(LogLevel.INFO); + reasoner.setLogFile(logFilePath); + reasoner.load(); + reasoner.setLogLevel(LogLevel.INFO); + reasoner.reason(); + reasoner.setLogLevel(LogLevel.INFO); } final int countLinesReasonLogLevelInfo = readFile(logFilePath); assertTrue(countLinesReasonLogLevelInfo > 0); @@ -153,23 +150,20 @@ public void testLogLevelInfo() throws ReasonerStateException, EdbIdbSeparationEx } @Test - public void testLogLevelDebug() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testLogLevelDebug() + throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDebug.log"; assertFalse(new File(logFilePath).exists()); - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rule); - - try (final Reasoner instance = Reasoner.getInstance(kb)) { - instance.addFacts(factPc); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - instance.setLogLevel(LogLevel.DEBUG); - instance.setLogFile(logFilePath); - instance.load(); - instance.setLogLevel(LogLevel.DEBUG); - instance.reason(); - instance.setLogLevel(LogLevel.DEBUG); - instance.close(); + reasoner.setLogLevel(LogLevel.DEBUG); + reasoner.setLogFile(logFilePath); + reasoner.load(); + reasoner.setLogLevel(LogLevel.DEBUG); + reasoner.reason(); + reasoner.setLogLevel(LogLevel.DEBUG); + reasoner.close(); } final int countLinesReasonLogLevelDebug = readFile(logFilePath); assertTrue(countLinesReasonLogLevelDebug > 0); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index 1d7a52ff5..3a1886179 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -43,21 +43,25 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; /** - * Test case ensuring {@link Reasoner#setReasoningTimeout(Integer)} works as expected and terminates reasoning after the given {@link #timeout}. - * Results are accepted within one second to account for setup and tear down of reasoning resources. + * Test case ensuring {@link Reasoner#setReasoningTimeout(Integer)} works as + * expected and terminates reasoning after the given {@link #timeout}. Results + * are accepted within one second to account for setup and tear down of + * reasoning resources. + * * @author Adrian Bielefeldt * */ public class ReasonerTimeoutTest { - + /** * The timeout after which reasoning should be completed in seconds. */ private static int timeout = 1; - + /** * A list of facts to be used in multiple test runs. */ @@ -66,11 +70,14 @@ public class ReasonerTimeoutTest { * A list of rules to be used in multiple test runs. 
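For the timeout test being rewritten here, the essential usage is setReasoningTimeout combined with a rule set whose chase does not terminate. A self-contained variant of the test setup; the bridging rule from infinite_EDB to infinite_IDB is an assumption added to keep EDB and IDB predicates separate, and the class name is a placeholder:

```
import org.semanticweb.vlog4j.core.model.api.Variable;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.Algorithm;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class TimeoutSketch {
    public static void main(String[] args) throws Exception {
        final Variable x = Expressions.makeVariable("x");
        final Variable y = Expressions.makeVariable("y");
        final Variable z = Expressions.makeVariable("z");

        final VLogKnowledgeBase kb = new VLogKnowledgeBase();
        kb.addFacts(Expressions.makePositiveLiteral("infinite_EDB",
                Expressions.makeConstant("a"), Expressions.makeConstant("b")));
        kb.addRules(Expressions.makeRule(Expressions.makePositiveLiteral("infinite_IDB", x, y),
                Expressions.makePositiveLiteral("infinite_EDB", x, y)));
        // z only occurs in the head, so it is existentially quantified and the
        // chase keeps inventing new successors instead of terminating.
        kb.addRules(Expressions.makeRule(Expressions.makePositiveLiteral("infinite_IDB", y, z),
                Expressions.makePositiveLiteral("infinite_IDB", x, y)));

        try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.setReasoningTimeout(1); // seconds
            reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE);
            reasoner.load();
            // Returns once the timeout is reached, leaving the materialisation incomplete.
            reasoner.reason();
        }
    }
}
```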
*/ private static List rules = new ArrayList<>(); - + private Reasoner reasoner; - + + private final static VLogKnowledgeBase kb = new VLogKnowledgeBase(); + /** - * The timeout after which reasoning should be completed. One second is added to account for setup and tear down of reasoning resources. + * The timeout after which reasoning should be completed. One second is added to + * account for setup and tear down of reasoning resources. */ @org.junit.Rule public Timeout globalTimeout = Timeout.seconds(timeout + 1); @@ -105,60 +112,62 @@ public static void setUpBeforeClass() { final PositiveLiteral infinite_IDB_yz = makePositiveLiteral(infinite_IDB, y, z); final Rule infinite_rule = makeRule(infinite_IDB_yz, infinite_IDB_xy); rules.add(infinite_rule); + + kb.addRules(rules); + kb.addFacts(facts); } @Before public void setUp() throws ReasonerStateException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rules); - - reasoner = Reasoner.getInstance(kb); - - reasoner.addFacts(facts); + reasoner = new VLogReasoner(kb); } @Test - public void skolem() throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void skolem() + throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { reasoner.setReasoningTimeout(timeout); reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - + reasoner.load(); - + reasoner.reason(); } - + @Test - public void restricted() throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void restricted() + throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { reasoner.setReasoningTimeout(timeout); reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - + reasoner.load(); - + reasoner.reason(); } - + @Test - public void skolemAfterLoad() throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void skolemAfterLoad() + throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - + reasoner.load(); - + reasoner.setReasoningTimeout(timeout); - + reasoner.reason(); } - + @Test - public void restrictedAfterLoad() throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void restrictedAfterLoad() + throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - + reasoner.load(); - + reasoner.setReasoningTimeout(timeout); - + reasoner.reason(); } - + @After public void tearDown() { reasoner.close(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index d193da4ea..83bdb6df1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.Set; +import org.junit.Ignore; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -55,23 +56,23 @@ public void 
testAddDataSourceExistentDataForDifferentPredicates() throws Reasone final Predicate predicateLArity1 = Expressions.makePredicate("l", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - final KnowledgeBase kb = new KnowledgeBaseImpl(); - + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFacts(factPredicatePArity2, factPredicateQArity1); + kb.addFactsFromDataSource(predicateLArity1, dataSource); + kb.addFactsFromDataSource(predicateParity1, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFacts(factPredicatePArity2, factPredicateQArity1); - reasoner.addFactsFromDataSource(predicateLArity1, dataSource); - reasoner.addFactsFromDataSource(predicateParity1, dataSource); reasoner.load(); reasoner.reason(); final QueryResultIterator queryResultIteratorL1 = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeVariable("x")), false); final Set> queryResultsL1 = QueryResultsUtils.collectQueryResults(queryResultIteratorL1); - + final QueryResultIterator queryResultIteratorP1 = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateParity1, Expressions.makeVariable("x")), false); final Set> queryResultsP1 = QueryResultsUtils.collectQueryResults(queryResultIteratorP1); assertEquals(queryResultsL1, queryResultsP1); - + } } @@ -81,96 +82,93 @@ public void testAddDataSourceBeforeLoading() throws ReasonerStateException, EdbI final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - - final KnowledgeBase kb = new KnowledgeBaseImpl(); + + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFactsFromDataSource(predicateP, dataSource); - reasoner.addFactsFromDataSource(predicateQ, dataSource); + kb.addFactsFromDataSource(predicateP, dataSource); + kb.addFactsFromDataSource(predicateQ, dataSource); reasoner.load(); } } + // TODO rewrite test + @Ignore @Test(expected = ReasonerStateException.class) public void testAddDataSourceAfterLoading() throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - - final KnowledgeBase kb = new KnowledgeBaseImpl(); + + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFactsFromDataSource(predicateP, dataSource); + kb.addFactsFromDataSource(predicateP, dataSource); reasoner.load(); - reasoner.addFactsFromDataSource(predicateQ, dataSource); + kb.addFactsFromDataSource(predicateQ, dataSource); } } + // TODO rewrite test + @Ignore @Test(expected = ReasonerStateException.class) public void testAddDataSourceAfterReasoning() throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - - final KnowledgeBase kb = new KnowledgeBaseImpl(); + + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); try (final 
VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFactsFromDataSource(predicateP, dataSource); + kb.addFactsFromDataSource(predicateP, dataSource); reasoner.load(); reasoner.reason(); - reasoner.addFactsFromDataSource(predicateQ, dataSource); + kb.addFactsFromDataSource(predicateQ, dataSource); } } + // TODO move to a test class for VLogKnowledgeBase @Test(expected = IllegalArgumentException.class) public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws ReasonerStateException, IOException { final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - - final KnowledgeBase kb = new KnowledgeBaseImpl(); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFactsFromDataSource(predicate, dataSource); - reasoner.addFactsFromDataSource(predicate, dataSource); - } + + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(predicate, dataSource); + kb.addFactsFromDataSource(predicate, dataSource); } + // TODO move to a test class for VLogKnowledgeBase @Test(expected = IllegalArgumentException.class) public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException, IOException { final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - final PositiveLiteral fact = Expressions.makePositiveLiteral(Expressions.makePredicate("p", 1), Expressions.makeConstant("a")); - - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final PositiveLiteral fact = Expressions.makePositiveLiteral(Expressions.makePredicate("p", 1), + Expressions.makeConstant("a")); - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFacts(fact); - reasoner.addFactsFromDataSource(predicate, dataSource); - } + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFacts(fact); + kb.addFactsFromDataSource(predicate, dataSource); } + // TODO move to a test class for VLogKnowledgeBase @Test(expected = NullPointerException.class) public void testAddDataSourcePredicateNotNull() throws ReasonerStateException, IOException { final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - - final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFactsFromDataSource(null, dataSource); - } + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(null, dataSource); } + // TODO move to a test class for VLogKnowledgeBase @Test(expected = NullPointerException.class) public void testAddDataSourceNotNullDataSource() throws ReasonerStateException { final Predicate predicate = Expressions.makePredicate("p", 1); - final KnowledgeBase kb = new KnowledgeBaseImpl(); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFactsFromDataSource(predicate, null); - } + final KnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(predicate, null); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index dfa973107..366dd3ab0 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -69,10 +69,10 @@ public void 
testEDBQuerySameConstantSubstitutesSameVariableName() @SuppressWarnings("unchecked") final Set> factCCD = Sets.newSet(Arrays.asList(constantC, constantC, constantD)); - final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(fact); + try (final Reasoner reasoner = Reasoner.getInstance()) { + final KnowledgeBase kb = reasoner.getKnowledgeBase(); + kb.addFacts(fact); reasoner.load(); final PositiveLiteral queryAtomXYZ = Expressions.makePositiveLiteral(predicate, x, y, z); @@ -113,13 +113,13 @@ public void testIDBQuerySameBlankSubstitutesSameVariableName() Expressions.makeConjunction(Expressions.makePositiveLiteral(predicate, x))); assertEquals(Sets.newSet(y, z), pX__pYY_pYZ.getExistentiallyQuantifiedVariables()); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(pX__pYY_pYZ); + kb.addFacts(Expressions.makePositiveLiteral(predicate, Expressions.makeConstant("c"))); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - reasoner.addFacts(Expressions.makePositiveLiteral(predicate, Expressions.makeConstant("c"))); reasoner.load(); reasoner.reason(); @@ -157,11 +157,11 @@ public void testIDBQuerySameIndividualSubstitutesSameVariableName() final Constant constantD = Expressions.makeConstant("d"); final PositiveLiteral factPcd = Expressions.makePositiveLiteral(predicate, constantC, constantD); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(pXY__pXYYZZT); + kb.addFacts(factPcd); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(factPcd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.reason(); @@ -224,11 +224,11 @@ public void queryResultWithBlanks() throws ReasonerStateException, EdbIdbSeparat final PositiveLiteral fact = Expressions.makePositiveLiteral("p", constantC); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("q", Expressions.makeVariable("?x")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(existentialRule); + kb.addFacts(fact); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFacts(fact); reasoner.load(); reasoner.reason(); @@ -249,7 +249,7 @@ public void queryResultWithBlanks() throws ReasonerStateException, EdbIdbSeparat @Test public void queryEmptyKnowledgeBase() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -269,11 +269,11 @@ public void queryEmptyKnowledgeBase() throws IOException, EdbIdbSeparationExcept @Test public void queryEmptyRules() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + final PositiveLiteral fact = Expressions.makePositiveLiteral("P", Expressions.makeConstant("c")); + kb.addFacts(fact); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - final PositiveLiteral 
fact = Expressions.makePositiveLiteral("P", Expressions.makeConstant("c")); - reasoner.addFacts(fact); reasoner.load(); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); @@ -295,7 +295,7 @@ public void queryEmptyFacts() final Variable vx = Expressions.makeVariable("x"); final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java index 7fb92f020..f7b2bd04c 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java @@ -32,8 +32,6 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; @@ -55,10 +53,10 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() // final String csvFilePath = CSV_EXPORT_FOLDER + "output"; final List> factCCD = Arrays.asList(Arrays.asList("c", "c", "d")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFacts(fact); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(fact); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); final PositiveLiteral queryAtomXYZ = Expressions.makePositiveLiteral(predicate, x, y, z); @@ -94,9 +92,9 @@ public void testExportQueryEmptyKnowledgeBase() final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, true); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java index f6f147b28..66ea14e60 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -36,7 +36,6 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import 
org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -105,10 +104,10 @@ public static void testLoadEmptyFile(final Predicate predicate, final PositiveLi final FileDataSource emptyFileDataSource) throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(predicate, emptyFileDataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(predicate, emptyFileDataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); reasoner.reason(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 275965fc0..391b617d6 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -35,7 +35,6 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; @@ -51,38 +50,42 @@ public class GeneratedAnonymousIndividualsTest { private static final Variable vy = Expressions.makeVariable("y"); private static final Variable vz = Expressions.makeVariable("z"); private static final String p = "p"; - + // rule: P(?x) -> P(?x,!y), P(?x,!z) private static final Rule existentialRule = Expressions.makeRule( - Expressions.makePositiveConjunction(Expressions.makePositiveLiteral(p, vx, vy), Expressions.makePositiveLiteral(p, vx, vz)), + Expressions.makePositiveConjunction(Expressions.makePositiveLiteral(p, vx, vy), + Expressions.makePositiveLiteral(p, vx, vz)), Expressions.makeConjunction(Expressions.makePositiveLiteral(p, vx))); - static { - // y,z existential variables that can introduce blanks (anonymous individuals) - assertEquals(Sets.newSet(vy, vz), existentialRule.getExistentiallyQuantifiedVariables()); - } + private static VLogKnowledgeBase kb = new VLogKnowledgeBase(); // fact: P(c) private static final Constant constantC = Expressions.makeConstant("c"); private static final PositiveLiteral fact = Expressions.makePositiveLiteral(p, constantC); // query: P(?x,?y) ? 
- final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(p, Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); + final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(p, Expressions.makeVariable("?x"), + Expressions.makeVariable("?y")); + + static { + // y,z existential variables that can introduce blanks (anonymous individuals) + assertEquals(Sets.newSet(vy, vz), existentialRule.getExistentiallyQuantifiedVariables()); + + kb.addRules(existentialRule); + kb.addFacts(fact); + } @Test public void testBlanksSkolemChaseNoRuleRewrite() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(existentialRule); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); - - reasoner.addFacts(fact); + reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - + checkTowDistinctBlanksGenerated(reasoner); } } @@ -90,20 +93,17 @@ public void testBlanksSkolemChaseNoRuleRewrite() @Test public void testBlanksSkolemChaseSplitHeadPieces() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(existentialRule); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); // P(?x) -> P(?x,!y), P(?x,!z) // after split becomes {{P(?x) -> P(?x,!y), {P(?x)-> P(?x,!z)}} reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - - reasoner.addFacts(fact); + reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - + checkTowDistinctBlanksGenerated(reasoner); } } @@ -111,18 +111,15 @@ public void testBlanksSkolemChaseSplitHeadPieces() @Test public void testBlanksRestrictedChaseNoRuleRewrite() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(existentialRule); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); - - reasoner.addFacts(fact); + reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - + checkTowDistinctBlanksGenerated(reasoner); } } @@ -130,22 +127,20 @@ public void testBlanksRestrictedChaseNoRuleRewrite() @Test public void testBlanksRestrictedChaseSplitHeadPieces() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(existentialRule); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); // {P(?x) -> P(?x,!y), P(?x,!z)} // after split becomes {{P(?x) -> P(?x,!y), {P(?x)-> P(?x,!z)}} reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - reasoner.addFacts(fact); reasoner.load(); reasoner.reason(); 
reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); // expected fact: P(c, _:b) - final List> csvContentIncludeBlanks = FileDataSourceTestUtils.getCSVContent(includeBlanksFilePath); + final List> csvContentIncludeBlanks = FileDataSourceTestUtils + .getCSVContent(includeBlanksFilePath); assertTrue(csvContentIncludeBlanks.size() == 1); for (final List queryResult : csvContentIncludeBlanks) { assertTrue(queryResult.size() == 2); @@ -155,7 +150,8 @@ public void testBlanksRestrictedChaseSplitHeadPieces() assertNotEquals("c", blank); reasoner.exportQueryAnswersToCsv(queryAtom, excludeBlanksFilePath, false); - final List> csvContentExcludeBlanks = FileDataSourceTestUtils.getCSVContent(excludeBlanksFilePath); + final List> csvContentExcludeBlanks = FileDataSourceTestUtils + .getCSVContent(excludeBlanksFilePath); assertTrue(csvContentExcludeBlanks.isEmpty()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java index 2dd81d66f..624739778 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java @@ -37,8 +37,6 @@ import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; @@ -76,11 +74,11 @@ public void testLoadUnaryFactsFromCsvFile() throws ReasonerStateException, EdbId private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(unaryPredicate1, fileDataSource); + kb.addFactsFromDataSource(unaryPredicate2, fileDataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(unaryPredicate1, fileDataSource); - reasoner.addFactsFromDataSource(unaryPredicate2, fileDataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); final QueryResultIterator queryResultIterator1 = reasoner @@ -110,10 +108,10 @@ public void testLoadNonexistingCsvFile() final File nonexistingFile = new File("nonexistingFile.csv"); assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new CsvFileDataSource(nonexistingFile); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(unaryPredicate1, fileDataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(unaryPredicate1, fileDataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); } } @@ -123,10 +121,10 @@ public void testLoadCsvFileWrongArity() throws IOException, 
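The blank-handling tests above hinge on the last argument of exportQueryAnswersToCsv (and of answerQuery), which toggles whether anonymous individuals introduced by existential rules are included in the answers. A minimal sketch with placeholder file and class names:

```
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Variable;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class BlanksExportSketch {
    public static void main(String[] args) throws Exception {
        final Variable x = Expressions.makeVariable("x");
        final Variable y = Expressions.makeVariable("y");

        final VLogKnowledgeBase kb = new VLogKnowledgeBase();
        // p(c) together with the existential rule p(?x) -> q(?x, !y)
        // produces a fact q(c, _:blank) during reasoning.
        kb.addFacts(Expressions.makePositiveLiteral("p", Expressions.makeConstant("c")));
        kb.addRules(Expressions.makeRule(Expressions.makePositiveLiteral("q", x, y),
                Expressions.makePositiveLiteral("p", x)));

        final PositiveLiteral query = Expressions.makePositiveLiteral("q",
                Expressions.makeVariable("?x"), Expressions.makeVariable("?y"));

        try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.load();
            reasoner.reason();
            // Including blanks yields one row (c plus a generated individual);
            // excluding them yields an empty file, as in the test above.
            reasoner.exportQueryAnswersToCsv(query, "with-blanks.csv", true);
            reasoner.exportQueryAnswersToCsv(query, "without-blanks.csv", false);
        }
    }
}
```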
ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { final FileDataSource fileDataSource = new CsvFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(unaryPredicate1, fileDataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(unaryPredicate1, fileDataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java index 54890d269..76d8ed403 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java @@ -38,57 +38,56 @@ public class LoadDataFromMemoryTest { @Test(expected = EdbIdbSeparationException.class) - public void loadEdbIdbNotSeparated() - throws EDBConfigurationException, IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void loadEdbIdbNotSeparated() throws EDBConfigurationException, IOException, EdbIdbSeparationException, + ReasonerStateException, IncompatiblePredicateArityException { final Variable vx = Expressions.makeVariable("x"); - final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); + final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), + Expressions.makePositiveLiteral("p", vx)); final PositiveLiteral factIDBpredQ1 = Expressions.makePositiveLiteral("q", Expressions.makeConstant("c")); final PositiveLiteral factEDBpredQ2 = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), Expressions.makeConstant("d")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); + kb.addFacts(factIDBpredQ1, factEDBpredQ2); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFacts(factIDBpredQ1, factEDBpredQ2); reasoner.load(); } } @Test - public void loadEdbIdbSeparated() - throws EDBConfigurationException, IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void loadEdbIdbSeparated() throws EDBConfigurationException, IOException, EdbIdbSeparationException, + ReasonerStateException, IncompatiblePredicateArityException { final Variable vx = Expressions.makeVariable("x"); - final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); + final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), + Expressions.makePositiveLiteral("p", vx)); final PositiveLiteral factEDBpred = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), Expressions.makeConstant("d")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); + kb.addFacts(factEDBpred); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFacts(factEDBpred); reasoner.load(); } } + // TODO move to a test class for KnowledgeBase 
@Test(expected = IllegalArgumentException.class) public void addFactsWithVariableTerms() throws ReasonerStateException { - final PositiveLiteral factWithVariableTerms = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), - Expressions.makeVariable("x")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFacts(factWithVariableTerms); - } + final PositiveLiteral factWithVariableTerms = Expressions.makePositiveLiteral("q", + Expressions.makeConstant("d"), Expressions.makeVariable("x")); + final KnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFacts(factWithVariableTerms); } + // TODO move to a test class for KnowledgeBase @Test(expected = IllegalArgumentException.class) public void addFactsWithBlankTerms() throws ReasonerStateException { - final PositiveLiteral factWithBlankTerms = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), new BlankImpl("b")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFacts(factWithBlankTerms); - } + final PositiveLiteral factWithBlankTerms = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), + new BlankImpl("b")); + final KnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFacts(factWithBlankTerms); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java index b46298dce..8bdf42d64 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java @@ -37,8 +37,6 @@ import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; @@ -77,10 +75,10 @@ public void testLoadTernaryFactsFromRdfFile() throws ReasonerStateException, Edb public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fileDataSource) throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(ternaryPredicate, fileDataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(ternaryPredicate, fileDataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true); @@ -96,10 +94,10 @@ public void testLoadNonexistingRdfFile() final File nonexistingFile = new File("nonexistingFile.nt"); assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new 
VLogKnowledgeBase(); + kb.addFactsFromDataSource(ternaryPredicate, fileDataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(ternaryPredicate, fileDataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); } } @@ -109,10 +107,10 @@ public void testLoadRdfInvalidFormat() throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { final FileDataSource fileDataSource = new RdfFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(ternaryPredicate, fileDataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(ternaryPredicate, fileDataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); FileDataSourceTestUtils.testNoFactsOverPredicate(reasoner, queryAtom); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java index 64548b8dd..1d2d083b8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java @@ -34,8 +34,6 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; @@ -61,10 +59,10 @@ public void testSimpleSparqlQuery() // a has father b "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(fatherOfPredicate, dataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { @@ -87,10 +85,10 @@ public void testSimpleSparqlQueryHttps() // a has father b "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(fatherOfPredicate, dataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); try (final QueryResultIterator answerQuery = 
reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { @@ -121,10 +119,10 @@ public void testSimpleSparqlQuery2() // a has father b "?a wdt:P22 ?b ."); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(fatherOfPredicate, dataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { @@ -145,10 +143,10 @@ public void testConjunctiveQueryNewLineCharacterInQueryBody() // b has father a and b has mother c "?b wdt:P22 ?a .\n" + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(haveChildrenTogether, dataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(haveChildrenTogether, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, Expressions.makeVariable("x"), Expressions.makeVariable("y")), false); @@ -166,10 +164,10 @@ public void testConjunctiveQuery() // b has father a and b has mother c "?b wdt:P22 ?a ." + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(haveChildrenTogether, dataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFactsFromDataSource(haveChildrenTogether, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { @@ -189,11 +187,10 @@ public void testDataSourcePredicateDoesNotMatchSparqlQueryTerms() final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // b has father a and b has mother c "?b wdt:P22 ?a ." 
+ "?b wdt:P25 ?c"); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFactsFromDataSource(Expressions.makePredicate("ternary", 3), dataSource); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - // TODO must validate predicate arity sonner - reasoner.addFactsFromDataSource(Expressions.makePredicate("ternary", 3), dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 44651db01..dcec9122e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -66,16 +66,14 @@ public class ReasonerStateTest { @Test(expected = NullPointerException.class) public void testSetAlgorithm() { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(kb);) { + try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.setAlgorithm(null); } } @Test(expected = IllegalArgumentException.class) public void testSetReasoningTimeout() { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(kb);) { + try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.setReasoningTimeout(-3); } } @@ -85,9 +83,8 @@ public void testSetReasoningTimeout() { @Test(expected = ReasonerStateException.class) public void testAddRules1() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(ruleQxPx); - try (final Reasoner reasoner = Reasoner.getInstance(kb);) { + try (final Reasoner reasoner = Reasoner.getInstance();) { + reasoner.getKnowledgeBase().addRules(ruleQxPx); reasoner.load(); } } @@ -95,58 +92,60 @@ public void testAddRules1() @Test public void testAddRules2() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(ruleQxPx); - try (final Reasoner reasoner = Reasoner.getInstance(kb);) { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.resetReasoner(); } } @Test(expected = IllegalArgumentException.class) - public void testAddRules3() { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + public void testAddRules3() { + final KnowledgeBase kb = new VLogKnowledgeBase(); final List rules = new ArrayList<>(); rules.add(ruleQxPx); rules.add(null); kb.addRules(rules); } - @Test(expected = ReasonerStateException.class) + // FIXME update test + @Ignore + @Test public void testAddFacts1() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - try (final Reasoner reasoner = Reasoner.getInstance(kb);) { + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFacts(factPc); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - reasoner.addFacts(factPc); } } @Test(expected = IllegalArgumentException.class) public void testAddFacts2() throws EdbIdbSeparationException, 
IOException, ReasonerStateException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + final List facts = new ArrayList<>(); + facts.add(factPc); + facts.add(null); + kb.addFacts(facts); - try (final Reasoner reasoner = Reasoner.getInstance(kb);) { - final List facts = new ArrayList<>(); - facts.add(factPc); - facts.add(null); - reasoner.addFacts(facts); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); } } @Test public void testResetBeforeLoad() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { + try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.resetReasoner(); } } @Test(expected = NullPointerException.class) public void setRuleRewriteStrategy1() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl());) { + try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.setRuleRewriteStrategy(null); } } @@ -154,7 +153,7 @@ public void setRuleRewriteStrategy1() throws ReasonerStateException { @Test(expected = ReasonerStateException.class) public void setRuleRewriteStrategy2() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl());) { + try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.load(); reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); } @@ -163,7 +162,7 @@ public void setRuleRewriteStrategy2() @Test public void setRuleRewriteStrategy3() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl());) { + try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.load(); reasoner.resetReasoner(); reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); @@ -173,13 +172,13 @@ public void setRuleRewriteStrategy3() @Test public void testResetDiscardInferences() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(ruleQxPx); + kb.addFacts(factPc); for (final Algorithm algorithm : Algorithm.values()) { // discard inferences regardless of the inference algorithm - try (final Reasoner reasoner = Reasoner.getInstance(kb);) { - reasoner.addFacts(factPc); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(algorithm); reasoner.load(); @@ -210,17 +209,17 @@ public void testResetDiscardInferences() @Test public void testResetKeepExplicitDatabase() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(ruleQxPx); - - try (final Reasoner reasoner = Reasoner.getInstance(kb);) { - // assert p(c) - reasoner.addFacts(factPc); - // assert r(d) - final Predicate predicateR1 = Expressions.makePredicate("r", 1); - reasoner.addFactsFromDataSource(predicateR1, - new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER, "constantD.csv"))); - // p(?x) -> q(?x) + // assert p(c) + kb.addFacts(factPc); + // assert r(d) + final Predicate predicateR1 = Expressions.makePredicate("r", 
1); + kb.addFactsFromDataSource(predicateR1, + new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER, "constantD.csv"))); + // p(?x) -> q(?x) + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); checkExplicitFacts(reasoner, predicateR1); @@ -257,56 +256,57 @@ private void checkExplicitFacts(final Reasoner reasoner, final Predicate predica @Test public void testResetEmptyKnowledgeBase() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); - final Reasoner reasoner = Reasoner.getInstance(kb); - // 1. load and reason - reasoner.load(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } - reasoner.reason(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } - reasoner.resetReasoner(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + // 1. load and reason + reasoner.load(); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { + assertFalse(queryResultIterator.hasNext()); + } + reasoner.reason(); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { + assertFalse(queryResultIterator.hasNext()); + } + reasoner.resetReasoner(); - // 2. load again - reasoner.load(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } - reasoner.resetReasoner(); + // 2. load again + reasoner.load(); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { + assertFalse(queryResultIterator.hasNext()); + } + reasoner.resetReasoner(); - // 3. load and reason again - reasoner.load(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } - reasoner.reason(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); + // 3. 
load and reason again + reasoner.load(); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { + assertFalse(queryResultIterator.hasNext()); + } + reasoner.reason(); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { + assertFalse(queryResultIterator.hasNext()); + } + reasoner.close(); } - reasoner.close(); } @Test(expected = ReasonerStateException.class) public void testFailReasonBeforeLoad() throws ReasonerStateException, IOException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { + try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.reason(); } } @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryBeforeLoad() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { + try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.answerQuery(exampleQueryAtom, true); } } @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswerToCsvBeforeLoad() throws ReasonerStateException, IOException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { + try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); } @@ -315,7 +315,7 @@ public void testFailExportQueryAnswerToCsvBeforeLoad() throws ReasonerStateExcep @Test public void testSuccessiveCloseAfterLoad() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { + try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.load(); reasoner.close(); reasoner.close(); @@ -324,7 +324,7 @@ public void testSuccessiveCloseAfterLoad() @Test public void testSuccessiveCloseBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance(new KnowledgeBaseImpl())) { + try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.close(); reasoner.close(); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java index 861ac3667..d615ab375 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java @@ -37,7 +37,6 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; @@ -63,11 +62,11 @@ public class ReasonerTest { @Test public void testCloseRepeatedly() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBaseImpl())) { + try (final VLogReasoner reasoner = new VLogReasoner(new VLogKnowledgeBase())) { reasoner.close(); } - try (final VLogReasoner reasoner = new 
VLogReasoner(new KnowledgeBaseImpl())) { + try (final VLogReasoner reasoner = new VLogReasoner(new VLogKnowledgeBase())) { reasoner.load(); reasoner.close(); reasoner.close(); @@ -77,7 +76,7 @@ public void testCloseRepeatedly() @Test public void testLoadRules() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(ruleBxAx, ruleCxBx); kb.addRules(ruleBxAx); @@ -89,11 +88,11 @@ public void testLoadRules() @Test public void testSimpleInference() throws EDBConfigurationException, IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(ruleBxAx, ruleCxBx); + kb.addFacts(factAc, factAd); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFacts(factAc, factAd); reasoner.load(); final QueryResultIterator cxQueryResultEnumBeforeReasoning = reasoner.answerQuery(atomCx, true); @@ -112,12 +111,12 @@ public void testSimpleInference() throws EDBConfigurationException, IOException, } } + // TODO move to a test class for KnowledgeBase @Test public void testGenerateDataSourcesConfigEmpty() throws ReasonerStateException, IOException { - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBaseImpl())) { - final String dataSourcesConfig = reasoner.generateDataSourcesConfig(); - assertTrue(dataSourcesConfig.isEmpty()); - } + final VLogKnowledgeBase knowledgeBase = new VLogKnowledgeBase(); + final String dataSourcesConfig = knowledgeBase.generateDataSourcesConfig(); + assertTrue(dataSourcesConfig.isEmpty()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java index dfd93bb26..2907d677e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java @@ -39,8 +39,6 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; @@ -60,17 +58,16 @@ public void testNotStratifiableEdbIdbSeparation() final Rule rule = makeRule(qXY, pXY, notQXY); final PositiveLiteral fact = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); - - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rule); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(fact); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addRules(rule); + kb.addFacts(fact); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); } } - + @Test(expected = RuntimeException.class) public void testNotStratifiable() throws EdbIdbSeparationException, IncompatiblePredicateArityException, ReasonerStateException, 
IOException { @@ -84,18 +81,17 @@ public void testNotStratifiable() final Rule rule = makeRule(qXY, pXY, notQXY); final PositiveLiteral fact = makePositiveLiteral("P", makeConstant("c"), makeConstant("d")); - - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rule); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(fact); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addRules(rule); + kb.addFacts(fact); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.reason(); } } - + @Test public void testStratifiable() throws EdbIdbSeparationException, IncompatiblePredicateArityException, ReasonerStateException, IOException { @@ -113,27 +109,26 @@ public void testStratifiable() final Constant e = makeConstant("e"); final Constant f = makeConstant("f"); final PositiveLiteral pEF = makePositiveLiteral("P", e, f); - + final PositiveLiteral qCD = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); + kb.addFacts(pCD, pEF, qCD); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(pCD, pEF, qCD); - + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.reason(); - - try(QueryResultIterator result=reasoner.answerQuery(sXY, true)){ + + try (QueryResultIterator result = reasoner.answerQuery(sXY, true)) { assertTrue(result.hasNext()); final QueryResult answer = result.next(); - assertEquals(answer.getTerms(),Arrays.asList(e, f)); + assertEquals(answer.getTerms(), Arrays.asList(e, f)); assertFalse(result.hasNext()); } } } - + @Test public void testInputNegation() throws EdbIdbSeparationException, IncompatiblePredicateArityException, ReasonerStateException, IOException { @@ -150,22 +145,21 @@ public void testInputNegation() final Constant e = makeConstant("e"); final Constant f = makeConstant("f"); final PositiveLiteral pEF = makePositiveLiteral("P", e, f); - + final PositiveLiteral qCD = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); + kb.addFacts(pCD, pEF, qCD); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(pCD, pEF, qCD); - + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.reason(); - - try(QueryResultIterator result=reasoner.answerQuery(sXY, true)){ + + try (QueryResultIterator result = reasoner.answerQuery(sXY, true)) { assertTrue(result.hasNext()); final QueryResult answer = result.next(); - assertEquals(answer.getTerms(),Arrays.asList(e, f)); + assertEquals(answer.getTerms(), Arrays.asList(e, f)); assertFalse(result.hasNext()); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 1569dfab7..2492d4dec 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -33,15 +33,14 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import 
org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; import fr.lirmm.graphik.graal.io.dlp.DlgpParser; @@ -65,7 +64,8 @@ public static void main(final String[] args) final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + /* Load rules from DLGP file */ try (final DlgpParser parser = new DlgpParser( new File(ExamplesUtils.INPUT_FOLDER + "/graal", "doid-example.dlgp"))) { @@ -91,36 +91,34 @@ public static void main(final String[] args) final NegativeLiteral hasNotDoid = Expressions.makeNegativeLiteral("hasDoid", y); kb.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), Expressions.makeConjunction(deathCause, hasNotDoid))); - - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - - /* Configure RDF data source */ - final Predicate doidTriplePredicate = makePredicate("doidTriple", 3); - final DataSource doidDataSource = new RdfFileDataSource( - new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); - reasoner.addFactsFromDataSource(doidTriplePredicate, doidDataSource); - - /* Configure SPARQL data sources */ - final String sparqlHumansWithDisease = "?disease wdt:P699 ?doid ."; - // (wdt:P669 = "Disease Ontology ID") - final DataSource diseasesDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, - "disease,doid", sparqlHumansWithDisease); - final Predicate diseaseIdPredicate = Expressions.makePredicate("diseaseId", 2); - reasoner.addFactsFromDataSource(diseaseIdPredicate, diseasesDataSource); - - final String sparqlRecentDeaths = "?human wdt:P31 wd:Q5; wdt:P570 ?deathDate . FILTER (YEAR(?deathDate) = 2018)"; - // (wdt:P31 = "instance of"; wd:Q5 = "human", wdt:570 = "date of death") - final DataSource recentDeathsDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "human", - sparqlRecentDeaths); - final Predicate recentDeathsPredicate = Expressions.makePredicate("recentDeaths", 1); - reasoner.addFactsFromDataSource(recentDeathsPredicate, recentDeathsDataSource); - - final String sparqlRecentDeathsCause = sparqlRecentDeaths + "?human wdt:P509 ?causeOfDeath . 
"; - // (wdt:P509 = "cause of death") - final DataSource recentDeathsCauseDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, - "human,causeOfDeath", sparqlRecentDeathsCause); - final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); - reasoner.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); + /* Configure RDF data source */ + final Predicate doidTriplePredicate = makePredicate("doidTriple", 3); + final DataSource doidDataSource = new RdfFileDataSource(new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); + kb.addFactsFromDataSource(doidTriplePredicate, doidDataSource); + + /* Configure SPARQL data sources */ + final String sparqlHumansWithDisease = "?disease wdt:P699 ?doid ."; + // (wdt:P669 = "Disease Ontology ID") + final DataSource diseasesDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "disease,doid", + sparqlHumansWithDisease); + final Predicate diseaseIdPredicate = Expressions.makePredicate("diseaseId", 2); + kb.addFactsFromDataSource(diseaseIdPredicate, diseasesDataSource); + + final String sparqlRecentDeaths = "?human wdt:P31 wd:Q5; wdt:P570 ?deathDate . FILTER (YEAR(?deathDate) = 2018)"; + // (wdt:P31 = "instance of"; wd:Q5 = "human", wdt:570 = "date of death") + final DataSource recentDeathsDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "human", + sparqlRecentDeaths); + final Predicate recentDeathsPredicate = Expressions.makePredicate("recentDeaths", 1); + kb.addFactsFromDataSource(recentDeathsPredicate, recentDeathsDataSource); + + final String sparqlRecentDeathsCause = sparqlRecentDeaths + "?human wdt:P509 ?causeOfDeath . "; + // (wdt:P509 = "cause of death") + final DataSource recentDeathsCauseDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, + "human,causeOfDeath", sparqlRecentDeathsCause); + final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); + kb.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { System.out.println("Rules configured:\n--"); kb.getRules().forEach(System.out::println); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 5978bcc99..769c36cbc 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -44,7 +44,6 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.examples.ExamplesUtils; /** @@ -130,15 +129,13 @@ public static void main(final String[] args) final PositiveLiteral isPartOfIDBYX = makePositiveLiteral(isPartOfIDB, y, x); final Rule rule8 = makeRule(hasPartIDBXY, isPartOfIDBYX); - /* - * 2. Loading, reasoning, and querying while using try-with-resources to close - * the reasoner automatically. 
- */ - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); - - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - + try (final Reasoner reasoner = Reasoner.getInstance()) { + /* + * 2. Loading, reasoning, and querying while using try-with-resources to close + * the reasoner automatically. + */ + final KnowledgeBase kb = reasoner.getKnowledgeBase(); + kb.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); /* Importing {@code .csv} files as data sources. */ final DataSource bicycleEDBDataSource = new CsvFileDataSource( new File(ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz")); @@ -146,9 +143,9 @@ public static void main(final String[] args) new File(ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz")); final DataSource wheelDataSource = new CsvFileDataSource( new File(ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz")); - reasoner.addFactsFromDataSource(bicycleEDB, bicycleEDBDataSource); - reasoner.addFactsFromDataSource(hasPartEDB, hasPartDataSource); - reasoner.addFactsFromDataSource(wheelEDB, wheelDataSource); + kb.addFactsFromDataSource(bicycleEDB, bicycleEDBDataSource); + kb.addFactsFromDataSource(hasPartEDB, hasPartDataSource); + kb.addFactsFromDataSource(wheelEDB, wheelDataSource); reasoner.load(); System.out.println("Before materialisation:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 075d92ad7..682faf6da 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -43,7 +43,6 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; @@ -130,19 +129,18 @@ public static void main(final String[] args) final PositiveLiteral isPartOfIDBReversed = makePositiveLiteral(triplesIDB, o, isPartOfPredicate, s); final Rule rule5 = makeRule(hasPartIDB, isPartOfIDBReversed); - /* - * 2. Loading, reasoning, querying and exporting, while using try-with-resources - * to close the reasoner automatically. - */ - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rule1, rule2, rule3, rule4, rule5); - - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { + try (final Reasoner reasoner = Reasoner.getInstance()) { + /* + * 2. Loading, reasoning, querying and exporting, while using try-with-resources + * to close the reasoner automatically. + */ + final KnowledgeBase kb = reasoner.getKnowledgeBase(); + kb.addRules(rule1, rule2, rule3, rule4, rule5); /* Importing {@code .nt.gz} file as data source. 
*/ final DataSource triplesEDBDataSource = new RdfFileDataSource( new File(ExamplesUtils.INPUT_FOLDER + "ternaryBicycleEDB.nt.gz")); - reasoner.addFactsFromDataSource(triplesEDB, triplesEDBDataSource); + kb.addFactsFromDataSource(triplesEDB, triplesEDBDataSource); reasoner.load(); System.out.println("Before materialisation:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 1300dcb5b..3977716f5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -39,7 +39,6 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -122,16 +121,15 @@ public static void main(final String[] args) final Predicate titleOfPublicationThatHasAuthorsWhoParentTheSameChild = Expressions .makePredicate("publicationAndAuthorsWhoParentTheSameChild", 3); - final KnowledgeBase kb = new KnowledgeBaseImpl(); - - try (Reasoner reasoner = Reasoner.getInstance(kb)) { + try (Reasoner reasoner = Reasoner.getInstance()) { + final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* * The SPARQL query results will be added to the reasoner knowledge base, as * facts associated to the predicate * titleOfPublicationThatHasAuthorsWhoParentTheSameChild. 
*/ - reasoner.addFactsFromDataSource(titleOfPublicationThatHasAuthorsWhoParentTheSameChild, + kb.addFactsFromDataSource(titleOfPublicationThatHasAuthorsWhoParentTheSameChild, sparqlQueryResultDataSource); reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java index 98763f51f..797e2bc0e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java @@ -42,7 +42,6 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; /** * This class exemplifies setting a log file and log level for VLog reasoner @@ -97,12 +96,11 @@ public class ConfigureReasonerLogging { public static void main(final String[] args) throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(rules); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - - reasoner.addFacts(fact); + try (final Reasoner reasoner = Reasoner.getInstance()) { + final KnowledgeBase kb = reasoner.getKnowledgeBase(); + kb.addRules(rules); + kb.addFacts(fact); /* * Default reasoner log level is WARNING. diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 15d844ff2..cc22f6382 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -29,12 +29,11 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; /** @@ -123,12 +122,12 @@ public static void main(final String[] args) * 2. Loading, reasoning, and querying. Use try-with resources, or remember to * call close() to free the reasoner resources. 
*/ - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); + kb.addFacts(fact1, fact2, fact3, fact4); - try (Reasoner reasoner = Reasoner.getInstance(kb)) { + try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.addFacts(fact1, fact2, fact3, fact4); reasoner.load(); /* See that there is no fact HasPartIDB before reasoning. */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java index 8cdd0a459..2f8155492 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java @@ -32,7 +32,6 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; @@ -105,14 +104,14 @@ public static void main(final String[] args) * 3. Loading, reasoning, and querying while using try-with-resources to close * the reasoner automatically. */ - final KnowledgeBase kb = new KnowledgeBaseImpl(); - kb.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); - for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { - kb.addRules(graalConjunctiveQueryToRule.getRule()); - } - try (Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(GraalToVLog4JModelConverter.convertAtoms(graalAtoms)); + try (Reasoner reasoner = Reasoner.getInstance()) { + final KnowledgeBase kb = reasoner.getKnowledgeBase(); + kb.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); + for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { + kb.addRules(graalConjunctiveQueryToRule.getRule()); + } + kb.addFacts(GraalToVLog4JModelConverter.convertAtoms(graalAtoms)); reasoner.load(); System.out.println("Before materialisation:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index 033da03e0..1d2ffa5cb 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -25,12 +25,11 @@ import java.util.List; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import 
org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; @@ -125,13 +124,12 @@ public static void main(final String[] args) * 4. Loading, reasoning, and querying while using try-with-resources to close * the reasoner automatically. */ - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); kb.addRules(convertedGraalConjunctiveQuery.getRule()); + kb.addFacts(GraalToVLog4JModelConverter.convertAtoms(graalAtoms)); - try (Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(GraalToVLog4JModelConverter.convertAtoms(graalAtoms)); - + try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); System.out.println("Before materialisation:"); ExamplesUtils.printOutQueryAnswers(convertedGraalConjunctiveQuery.getQuery(), reasoner); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index c159d8ebc..fce4b7434 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -35,13 +35,12 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; @@ -86,12 +85,12 @@ public static void main(final String[] args) throws OWLOntologyCreationException } System.out.println(); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(new ArrayList<>(owlToRulesConverter.getRules())); + kb.addFacts(owlToRulesConverter.getFacts()); - try (Reasoner reasoner = Reasoner.getInstance(kb)) { + try (VLogReasoner reasoner = new VLogReasoner(kb)) { /* Load rules and facts obtained from the ontology */ - reasoner.addFacts(owlToRulesConverter.getFacts()); reasoner.load(); /* Reason over loaded ontology with the default algorithm Restricted Chase */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index 207c4d160..0e59a44ac 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -44,13 +44,12 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.rdf.RdfModelConverter; @@ -147,21 +146,20 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti final Rule organizationRule = Expressions.makeRule(creatorOrganizationName, personHasAffiliation, affiliationWithOrganization, organizationHasName); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); /* * The rule that maps people to their organization name based on facts extracted * from RDF triples is added to the Reasoner's knowledge base. */ kb.addRules(organizationRule); + /* + * Facts extracted from the RDF resources are added to the Reasoner's knowledge + * base. + */ + kb.addFacts(tripleFactsISWC2016); + kb.addFacts(tripleFactsISWC2017); - try (final Reasoner reasoner = Reasoner.getInstance(kb);) { - /* - * Facts extracted from the RDF resources are added to the Reasoner's knowledge - * base. 
- */ - reasoner.addFacts(tripleFactsISWC2016); - reasoner.addFacts(tripleFactsISWC2017); - + try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.reason(); diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index fd91e4032..24f2baac3 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -49,7 +49,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.KnowledgeBaseImpl; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; public class TestReasonOverRdfFacts { @@ -71,7 +71,7 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToPositiveLiterals(model); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final KnowledgeBase kb = new VLogKnowledgeBase(); try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(facts); @@ -91,7 +91,7 @@ public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandle RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToPositiveLiterals(model); - final KnowledgeBase kb = new KnowledgeBaseImpl(); + final KnowledgeBase kb = new VLogKnowledgeBase(); try (final Reasoner reasoner = Reasoner.getInstance(kb)) { reasoner.addFacts(facts); From 25ae198f46a8b1dbfc86b69aeaa41b3ab60d8315 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Wed, 8 May 2019 17:00:18 +0200 Subject: [PATCH 0011/1003] fix compile errors --- .../vlog4j/rdf/TestReasonOverRdfFacts.java | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index 24f2baac3..59f530683 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -44,13 +44,13 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; public class TestReasonOverRdfFacts { @@ -71,10 +71,10 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl 
RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToPositiveLiterals(model); - final KnowledgeBase kb = new VLogKnowledgeBase(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFacts(facts); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(facts); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); final PositiveLiteral universalQuery = makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, subject, predicate, @@ -91,10 +91,10 @@ public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandle RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToPositiveLiterals(model); - final KnowledgeBase kb = new VLogKnowledgeBase(); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addFacts(facts); - try (final Reasoner reasoner = Reasoner.getInstance(kb)) { - reasoner.addFacts(facts); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); final Constant inventionPredicate = makeConstant("https://example.org/invention"); From ab8fb9c3f83675179b2b285b8150d0aec5cad138 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 27 May 2019 18:06:55 +0200 Subject: [PATCH 0012/1003] add materialisation state to question answers. --- vlog4j-core/LICENSE.txt | 402 +++++++++--------- ...ngState.java => MaterialisationState.java} | 17 +- .../vlog4j/core/reasoner/Reasoner.java | 18 +- .../implementation/QueryResultIterator.java | 12 +- .../implementation/VLogKnowledgeBase.java | 4 +- .../reasoner/implementation/VLogReasoner.java | 114 +++-- .../GeneratedAnonymousIndividualsTest.java | 14 +- .../implementation/ReasonerStateTest.java | 13 +- 8 files changed, 323 insertions(+), 271 deletions(-) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/{ReasoningState.java => MaterialisationState.java} (69%) diff --git a/vlog4j-core/LICENSE.txt b/vlog4j-core/LICENSE.txt index 261eeb9e9..29f81d812 100644 --- a/vlog4j-core/LICENSE.txt +++ b/vlog4j-core/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
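The remaining hunks of this patch rename ReasoningState to MaterialisationState and attach it to query answers through QueryResultIterator and exportQueryAnswersToCsv. A minimal end-to-end sketch of the resulting workflow, building on the VLogKnowledgeBase/VLogReasoner migration shown in the earlier example and test changes; the "triple" predicate and the empty knowledge base are illustrative, and the sketch assumes the existing Expressions factories makeVariable and makePositiveLiteral from vlog4j-core:

```
import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral;
import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable;

import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.reasoner.MaterialisationState;
import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class MaterialisationStateSketch {

	public static void main(final String[] args) throws Exception {
		final VLogKnowledgeBase kb = new VLogKnowledgeBase();
		// Facts and rules would be added here, e.g. kb.addFacts(...) and kb.addRules(...).

		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
			reasoner.load();
			reasoner.reason();

			// "triple" is an illustrative predicate name.
			final PositiveLiteral query = makePositiveLiteral("triple", makeVariable("s"), makeVariable("p"),
					makeVariable("o"));

			try (final QueryResultIterator answers = reasoner.answerQuery(query, true)) {
				if (answers.getMaterialisationState() == MaterialisationState.COMPLETE) {
					// Answers are sound and complete.
				} else if (answers.getMaterialisationState() == MaterialisationState.INCOMPLETE) {
					// Answers are sound but possibly incomplete, e.g. reasoning timed out.
				} else {
					// MaterialisationState.WRONG: call reasoner.reason() again before trusting answers.
				}
			}
		}
	}
}
```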
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasoningState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java similarity index 69% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasoningState.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java index c71cd78f7..fed24a7b9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasoningState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java @@ -21,25 +21,30 @@ */ /** - * Enum for different reasoning stages a {@link Reasoner} may be in, with respect to its {@link KnowledgeBase}. + * Enum for different states the materialisation of a {@link Reasoner}'s + * {@link KnowledgeBase} may be in. + * * @author Irina Dragoste * */ -public enum ReasoningState { +public enum MaterialisationState { //TODO should we have different states for incomplete due to halting, vs incomplete due to adding facts for non-negated rules? /** - * Reasoning has not completed. Query answering yields correct, but possibly incomplete answers. + * Reasoning has not completed. Query answering yields sound, but possibly + * incomplete answers. */ INCOMPLETE, /** - * Query answering may give incorrect answers. Re-reasoning ({@link Reasoner#reason()}) is required, in order to obtain correct results. + * Query answering may give incorrect answers. Re-materialisation + * ({@link Reasoner#reason()}) is required, in order to obtain correct results. */ WRONG, - /** - * Reasoning over current knowledge base is complete, and query answering yields correct and complete results. + /** + * Reasoning over current knowledge base is complete, and query answering yields + * sound and complete results. */ COMPLETE diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 63e11fa7e..692afcb1a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -355,12 +355,16 @@ void load() *
  • {@code false}, if reasoning has been interrupted before * completion.
  • * - * @throws IOException if I/O exceptions occur during reasoning. - * @throws ReasonerStateException if this method is called before loading - * ({@link Reasoner#load()} or after closing - * ({@link Reasoner#close()}). + * @throws IOException + * if I/O exceptions occur during reasoning. + * @throws ReasonerStateException + * if this method is called before loading ({@link Reasoner#load()} + * or after closing ({@link Reasoner#close()}). + * @throws IncompatiblePredicateArityException + * @throws EdbIdbSeparationException */ - boolean reason() throws IOException, ReasonerStateException; + boolean reason() + throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException; // TODO add examples to query javadoc /** @@ -460,7 +464,9 @@ QueryResultIterator answerQuery(@NonNull PositiveLiteral query, boolean includeB * extension. * */ - void exportQueryAnswersToCsv(@NonNull PositiveLiteral query, @NonNull String csvFilePath, boolean includeBlanks) + // TODO update javadoc with return type + MaterialisationState exportQueryAnswersToCsv(@NonNull PositiveLiteral query, @NonNull String csvFilePath, + boolean includeBlanks) throws ReasonerStateException, IOException; /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java index c1ec04d9e..3e0933c75 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java @@ -23,6 +23,7 @@ import java.util.Iterator; import org.semanticweb.vlog4j.core.model.api.QueryResult; +import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; @@ -38,8 +39,13 @@ public class QueryResultIterator implements Iterator, AutoCloseable private final TermQueryResultIterator vLogTermQueryResultIterator; - public QueryResultIterator(TermQueryResultIterator termQueryResultIterator) { + private final MaterialisationState materialisationState; + + // TODO add reasoningState to constructor + public QueryResultIterator(final TermQueryResultIterator termQueryResultIterator, + final MaterialisationState materialisationState) { this.vLogTermQueryResultIterator = termQueryResultIterator; + this.materialisationState = materialisationState; } @Override @@ -58,4 +64,8 @@ public void close() { this.vLogTermQueryResultIterator.close(); } + public MaterialisationState getMaterialisationState() { + return this.materialisationState; + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java index ea95b24c6..44a5db9c1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -115,12 +115,12 @@ Map> getFactsForPredicate() { } Set getEdbPredicates() { - // TODO use chache + // TODO use cache return collectEdbPredicates(); } Set getIdbPredicates() { - // TODO use chache + // TODO use cache return collectIdbPredicates(); } diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index beb960ab5..04c394d1f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -14,6 +14,7 @@ import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; @@ -59,6 +60,7 @@ public class VLogReasoner implements Reasoner { private final VLog vLog = new VLog(); private ReasonerState reasonerState = ReasonerState.BEFORE_LOADING; + private MaterialisationState materialisationState = MaterialisationState.INCOMPLETE; private LogLevel internalLogLevel = LogLevel.WARNING; private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; @@ -79,15 +81,16 @@ public VLogReasoner(VLogKnowledgeBase knowledgeBase) { @Override public KnowledgeBase getKnowledgeBase() { - return knowledgeBase; + return this.knowledgeBase; } @Override public void setAlgorithm(final Algorithm algorithm) { Validate.notNull(algorithm, "Algorithm cannot be null!"); this.algorithm = algorithm; - if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) + if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { LOGGER.warn("Setting algorithm on a closed reasoner."); + } } @Override @@ -101,8 +104,9 @@ public void setReasoningTimeout(Integer seconds) { Validate.isTrue(seconds > 0, "Only strictly positive timeout period alowed!", seconds); } this.timeoutAfterSeconds = seconds; - if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) + if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { LOGGER.warn("Setting timeout on a closed reasoner."); + } } @Override @@ -129,9 +133,11 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { @Override public void load() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) - throw new ReasonerStateException(reasonerState, "Loading is not allowed after closing."); + if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { + throw new ReasonerStateException(this.reasonerState, "Loading is not allowed after closing."); + } if (this.reasonerState != ReasonerState.BEFORE_LOADING) { + // TODO check if this is correct. 
LOGGER.warn("This method call is ineffective: the Reasoner has already been loaded."); } else { this.knowledgeBase.validateEdbIdbSeparation(); @@ -185,34 +191,52 @@ private void validateDataSourcePredicateArities() throws IncompatiblePredicateAr } @Override - public boolean reason() throws IOException, ReasonerStateException { - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, "Reasoning is not allowed before loading!"); - } else if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) { - throw new ReasonerStateException(reasonerState, "Reasoning is not allowed after closing."); - } else if (this.reasonerState == ReasonerState.AFTER_REASONING) { - LOGGER.warn( - "This method call is ineffective: this Reasoner has already reasoned. Successive reason() calls are not supported. Call reset() to undo loading and reasoning and reload to be able to reason again"); - } else { - this.reasonerState = ReasonerState.AFTER_REASONING; + public boolean reason() + throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + switch (this.reasonerState) { + case BEFORE_LOADING: + load(); + runChase(); + break; + case AFTER_LOADING: + // TODO check if changes occurred in the KB. If yes, only runChase(); otherwise, + // reset and reload. + runChase(); + break; + case AFTER_REASONING: + // TODO check if changes occurred in the KB. If yes, reset, reload, and run + // chase. If not, do nothing. + resetReasoner(); + load(); + runChase(); + break; + case AFTER_CLOSING: + throw new ReasonerStateException(this.reasonerState, "Reasoning is not allowed after closing."); + } + return this.reasoningCompleted; + } - final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; - try { - if (this.timeoutAfterSeconds == null) { - this.vLog.materialize(skolemChase); - this.reasoningCompleted = true; - } else { - this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final MaterializationException e) { - throw new RuntimeException( - "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", - e); + private void runChase() { + this.reasonerState = ReasonerState.AFTER_REASONING; + + final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; + try { + if (this.timeoutAfterSeconds == null) { + this.vLog.materialize(skolemChase); + this.reasoningCompleted = true; + } else { + this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); } + this.materialisationState = this.reasoningCompleted ? 
MaterialisationState.COMPLETE + : MaterialisationState.INCOMPLETE; + + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final MaterializationException e) { + throw new RuntimeException( + "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", + e); } - return this.reasoningCompleted; } @Override @@ -220,8 +244,8 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla final boolean filterBlanks = !includeBlanks; if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } else if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) { - throw new ReasonerStateException(reasonerState, "Querying is not allowed after closing."); + } else if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { + throw new ReasonerStateException(this.reasonerState, "Querying is not allowed after closing."); } Validate.notNull(query, "Query atom must not be null!"); @@ -232,17 +256,17 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } - return new QueryResultIterator(stringQueryResultIterator); + return new QueryResultIterator(stringQueryResultIterator, this.materialisationState); } @Override - public void exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, + public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws ReasonerStateException, IOException { final boolean filterBlanks = !includeBlanks; if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } else if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) { - throw new ReasonerStateException(reasonerState, "Querying is not allowed after closing."); + } else if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { + throw new ReasonerStateException(this.reasonerState, "Querying is not allowed after closing."); } Validate.notNull(query, "Query atom must not be null!"); Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); @@ -254,13 +278,15 @@ public void exportQueryAnswersToCsv(final PositiveLiteral query, final String cs } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } + return this.materialisationState; } @Override public void resetReasoner() throws ReasonerStateException { // TODO what should happen to the KB? 
- if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) - throw new ReasonerStateException(reasonerState, "Resetting is not allowed after closing."); + if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { + throw new ReasonerStateException(this.reasonerState, "Resetting is not allowed after closing."); + } this.reasonerState = ReasonerState.BEFORE_LOADING; this.vLog.stop(); LOGGER.warn( @@ -304,8 +330,9 @@ private void loadRules() { @Override public void setLogLevel(LogLevel logLevel) throws ReasonerStateException { - if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) - throw new ReasonerStateException(reasonerState, "Setting log level is not allowed after closing."); + if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { + throw new ReasonerStateException(this.reasonerState, "Setting log level is not allowed after closing."); + } Validate.notNull(logLevel, "Log level cannot be null!"); this.internalLogLevel = logLevel; this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); @@ -318,8 +345,9 @@ public LogLevel getLogLevel() { @Override public void setLogFile(String filePath) throws ReasonerStateException { - if (reasonerState.equals(ReasonerState.AFTER_CLOSING)) - throw new ReasonerStateException(reasonerState, "Setting log file is not allowed after closing."); + if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { + throw new ReasonerStateException(this.reasonerState, "Setting log file is not allowed after closing."); + } this.vLog.setLogFile(filePath); } @@ -387,7 +415,7 @@ public CyclicityResult checkForCycles() throws ReasonerStateException, NotStarte @Override public void update(Observable o, Object arg) { - // TODO update reasoning state for query answering + // TODO update materialisation state for query answering // TODO compute KB diff } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 391b617d6..3d1c1af8d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -84,7 +84,7 @@ public void testBlanksSkolemChaseNoRuleRewrite() reasoner.load(); reasoner.reason(); - reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); + reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true); checkTowDistinctBlanksGenerated(reasoner); } @@ -102,7 +102,7 @@ public void testBlanksSkolemChaseSplitHeadPieces() reasoner.load(); reasoner.reason(); - reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); + reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true); checkTowDistinctBlanksGenerated(reasoner); } @@ -118,7 +118,7 @@ public void testBlanksRestrictedChaseNoRuleRewrite() reasoner.load(); reasoner.reason(); - reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); + reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true); checkTowDistinctBlanksGenerated(reasoner); } @@ -137,7 +137,7 @@ public void testBlanksRestrictedChaseSplitHeadPieces() reasoner.load(); reasoner.reason(); - reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); + reasoner.exportQueryAnswersToCsv(this.queryAtom, 
includeBlanksFilePath, true); // expected fact: P(c, _:b) final List> csvContentIncludeBlanks = FileDataSourceTestUtils .getCSVContent(includeBlanksFilePath); @@ -149,7 +149,7 @@ public void testBlanksRestrictedChaseSplitHeadPieces() final String blank = csvContentIncludeBlanks.get(0).get(1); assertNotEquals("c", blank); - reasoner.exportQueryAnswersToCsv(queryAtom, excludeBlanksFilePath, false); + reasoner.exportQueryAnswersToCsv(this.queryAtom, excludeBlanksFilePath, false); final List> csvContentExcludeBlanks = FileDataSourceTestUtils .getCSVContent(excludeBlanksFilePath); assertTrue(csvContentExcludeBlanks.isEmpty()); @@ -158,7 +158,7 @@ public void testBlanksRestrictedChaseSplitHeadPieces() } private void checkTowDistinctBlanksGenerated(final Reasoner reasoner) - throws ReasonerStateException, IOException, EdbIdbSeparationException { + throws ReasonerStateException, IOException { // expected facts: P(c, _:b1), P(c, _:b2) final List> csvContentIncludeBlanks = FileDataSourceTestUtils.getCSVContent(includeBlanksFilePath); assertTrue(csvContentIncludeBlanks.size() == 2); @@ -172,7 +172,7 @@ private void checkTowDistinctBlanksGenerated(final Reasoner reasoner) assertNotEquals("c", blank1); assertNotEquals("c", blank2); - reasoner.exportQueryAnswersToCsv(queryAtom, excludeBlanksFilePath, false); + reasoner.exportQueryAnswersToCsv(this.queryAtom, excludeBlanksFilePath, false); final List> csvContentExcludeBlanks = FileDataSourceTestUtils.getCSVContent(excludeBlanksFilePath); assertTrue(csvContentExcludeBlanks.isEmpty()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index dcec9122e..9968521fd 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -185,7 +185,7 @@ public void testResetDiscardInferences() reasoner.reason(); try (final QueryResultIterator queryQxIterator = reasoner.answerQuery(ruleHeadQx, true)) { final Set> queryQxResults = QueryResultsUtils.collectQueryResults(queryQxIterator); - final Set> queryQxExpectedResults = new HashSet>(); + final Set> queryQxExpectedResults = new HashSet<>(); queryQxExpectedResults.add(Arrays.asList(c)); assertEquals(queryQxResults, queryQxExpectedResults); } @@ -198,7 +198,7 @@ public void testResetDiscardInferences() } try (final QueryResultIterator queryPxIterator = reasoner.answerQuery(ruleBodyPx, true)) { final Set> queryPxResults = QueryResultsUtils.collectQueryResults(queryPxIterator); - final Set> queryPxExpectedResults = new HashSet>(); + final Set> queryPxExpectedResults = new HashSet<>(); queryPxExpectedResults.add(Arrays.asList(c)); assertEquals(queryPxResults, queryPxExpectedResults); } @@ -290,11 +290,13 @@ public void testResetEmptyKnowledgeBase() } } - @Test(expected = ReasonerStateException.class) - public void testFailReasonBeforeLoad() throws ReasonerStateException, IOException { + @Test + public void testFailReasonBeforeLoad() + throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.reason(); } + } @Test(expected = ReasonerStateException.class) @@ -305,7 +307,8 @@ public void testFailAnswerQueryBeforeLoad() throws ReasonerStateException { } @Test(expected = 
ReasonerStateException.class) - public void testFailExportQueryAnswerToCsvBeforeLoad() throws ReasonerStateException, IOException { + public void testFailExportQueryAnswerToCsvBeforeLoad() + throws ReasonerStateException, IOException { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); From d2a8d75189231fee9293cda2a07a3ec696d03721 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 17 Jul 2019 15:12:51 +0200 Subject: [PATCH 0013/1003] add method makePositiveLiteralsRule that receives a list of literals as parameter --- .../core/model/implementation/Expressions.java | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java index 0bc8d118a..c266d3983 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java @@ -209,6 +209,17 @@ public static Conjunction makeConjunction(final Literal... literals) { return new ConjunctionImpl<>(Arrays.asList(literals)); } + /** + * Creates a {@code Conjunction} of {@link T} ({@link PositiveLiteral} type) + * objects. + * + * @param literals list of non-null positive literals + * @return a {@link Conjunction} corresponding to the input + */ + public static Conjunction makePositiveConjunction(final List literals) { + return new ConjunctionImpl<>(literals); + } + /** * Creates a {@code Conjunction} of {@link PositiveLiteral} objects. * @@ -230,7 +241,7 @@ public static Rule makeRule(final PositiveLiteral headLiteral, final Literal... return new RuleImpl(new ConjunctionImpl<>(Arrays.asList(headLiteral)), new ConjunctionImpl<>(Arrays.asList(bodyLiterals))); } - + /** * Creates a {@code Rule}. 
* @@ -249,12 +260,13 @@ public static Rule makeRule(final Conjunction head, final Conju * @param body conjunction of positive (non-negated) literals * @return a {@link Rule} corresponding to the input */ - public static Rule makePositiveLiteralsRule(final Conjunction head, final Conjunction body) { + public static Rule makePositiveLiteralsRule(final Conjunction head, + final Conjunction body) { final List bodyLiteralList = new ArrayList<>(body.getLiterals()); @NonNull final Conjunction literalsBody = makeConjunction(bodyLiteralList); return new RuleImpl(head, literalsBody); - + } } From e692d0e89a699233b8d5e3f479bd06e7b9aee967 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 17 Jul 2019 15:13:58 +0200 Subject: [PATCH 0014/1003] add vlog4j-parser module --- vlog4j-parser/pom.xml | 26 + .../vlog4j/parser/api/Prologue.java | 17 + .../vlog4j/parser/api/RuleParser.java | 23 + .../parser/implementation/LocalPrologue.java | 93 + .../implementation/PrologueException.java | 12 + .../parser/implementation/RuleParserBase.java | 349 ++++ .../parser/implementation/javacc/.gitignore | 5 + .../javacc/JavaCCRuleParser.java | 989 +++++++++++ .../implementation/javacc/JavaCCRuleParser.jj | 562 ++++++ .../javacc/JavaCCRuleParserTokenManager.java | 1500 +++++++++++++++++ 10 files changed, 3576 insertions(+) create mode 100644 vlog4j-parser/pom.xml create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml new file mode 100644 index 000000000..dd1568c8b --- /dev/null +++ b/vlog4j-parser/pom.xml @@ -0,0 +1,26 @@ + + + 4.0.0 + + org.semanticweb.vlog4j + vlog4j-parent + 0.4.0-SNAPSHOT + + org.semanticweb.vlog4j + vlog4j-parser + 0.4.0-SNAPSHOT + vlog4j-parser + http://maven.apache.org + + UTF-8 + + + + junit + junit + 3.8.1 + test + + + diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java new file mode 100644 index 000000000..d99bd8c38 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java @@ -0,0 +1,17 @@ +package org.semanticweb.vlog4j.parser.api; + +import org.semanticweb.vlog4j.parser.implementation.PrologueException; + +public interface Prologue { + + String getBase() throws PrologueException; + + void setBase(String base) throws PrologueException; + + String getPrefix(String prefix) throws PrologueException; + + void setPrefix(String prefix, String iri) throws PrologueException; + + String resolvePName(String prefixedName) throws PrologueException; + +} 
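Stepping back to the Expressions additions of the previous patch: makePositiveConjunction accepts a List of positive literals, and makePositiveLiteralsRule combines two such conjunctions into a rule. A minimal sketch of how the two might be used together, assuming the signatures shown in that hunk (generic type parameters are not visible in the plain-text diff) and the existing makePositiveLiteral and makeVariable factories; the predicates p and q are illustrative:

```
import java.util.Arrays;
import java.util.List;

import org.semanticweb.vlog4j.core.model.api.Conjunction;
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;

public class PositiveConjunctionSketch {

	public static void main(final String[] args) {
		// Illustrative predicates p and q over a single variable x.
		final PositiveLiteral bodyAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("x"));
		final PositiveLiteral headAtom = Expressions.makePositiveLiteral("q", Expressions.makeVariable("x"));

		// The new overload builds a conjunction of positive literals from a List instead of varargs.
		final List<PositiveLiteral> bodyLiterals = Arrays.asList(bodyAtom);
		final Conjunction<PositiveLiteral> body = Expressions.makePositiveConjunction(bodyLiterals);
		final Conjunction<PositiveLiteral> head = Expressions.makePositiveConjunction(Arrays.asList(headAtom));

		// q(?x) :- p(?x)
		final Rule rule = Expressions.makePositiveLiteralsRule(head, body);
		System.out.println(rule);
	}
}
```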
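Returning to the parser module: the Prologue interface above is implemented by the LocalPrologue singleton and backed by PrologueException in the hunks that follow. A minimal sketch of registering a base and a prefix and then resolving a prefixed name; the IRIs and the ex: prefix are illustrative:

```
import org.semanticweb.vlog4j.parser.api.Prologue;
import org.semanticweb.vlog4j.parser.implementation.LocalPrologue;
import org.semanticweb.vlog4j.parser.implementation.PrologueException;

public class PrologueSketch {

	public static void main(final String[] args) throws PrologueException {
		// LocalPrologue is a lazily initialised singleton implementation of Prologue.
		final Prologue prologue = LocalPrologue.getPrologue();

		// The base must be an absolute IRI; prefixes are stored including the trailing colon.
		prologue.setBase("https://example.org/base/");
		prologue.setPrefix("ex:", "https://example.org/vocabulary#");

		// Resolves the prefixed name against the registered prefix:
		// prints https://example.org/vocabulary#country
		System.out.println(prologue.resolvePName("ex:country"));
	}
}
```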
diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java new file mode 100644 index 000000000..0e91a7814 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java @@ -0,0 +1,23 @@ +package org.semanticweb.vlog4j.parser.api; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; + +import org.semanticweb.vlog4j.parser.implementation.javacc.JavaCCRuleParser; + + +public class RuleParser extends JavaCCRuleParser { + + public RuleParser(InputStream stream) { + super(stream, "UTF-8"); + } + + public RuleParser(InputStream stream, String encoding) { + super(stream, encoding); + } + + + public RuleParser(String rules) { + super(new ByteArrayInputStream(rules.getBytes()), "UTF-8"); + } +} \ No newline at end of file diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java new file mode 100644 index 000000000..600c9235d --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java @@ -0,0 +1,93 @@ +package org.semanticweb.vlog4j.parser.implementation; + +import java.net.URI; +import java.util.HashMap; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.semanticweb.vlog4j.parser.api.Prologue; + +final public class LocalPrologue implements Prologue { + + //??? Can I use default logguer + final static Logger logger = LoggerFactory.getLogger(LocalPrologue.class.getName()); + + private static Prologue prologue; + + Map prefixes; + URI baseURI; + + private LocalPrologue() { + prefixes = new HashMap(); + baseURI = null; + } + + public static synchronized Prologue getPrologue() { + // Lazy initialization + if (prologue == null) { + prologue = new LocalPrologue(); + logger.info("Creating new prologue"); + } else { + logger.info("Prologue previously defined"); + } + return prologue; + } + + public String getBase() throws PrologueException { + if (baseURI == null) + throw new PrologueException("@base not defined"); + return baseURI.toString(); + } + + public String getPrefix(String prefix) throws PrologueException { + if (!prefixes.containsKey(prefix)) + throw new PrologueException("@prefix " + prefix + " not defined"); + return prefixes.get(prefix).toString(); + } + + public void setPrefix(String prefix, String uri) throws PrologueException { + if (prefixes.containsKey(prefix)) { + throw new PrologueException("Can not re define @prefix: " + prefix); + } + URI newUri = URI.create(uri); + if (!newUri.isAbsolute()) { + newUri = baseURI.resolve(newUri); + } + logger.info("Setting new prefix: " + prefix + ", " + newUri.toString()); + prefixes.putIfAbsent(prefix, newUri); + } + + public void setBase(String baseString) throws PrologueException { + if (baseURI != null) + throw new PrologueException("Can not re define @base: " + baseURI.toString() + ", " + baseString); + URI newBase = URI.create(baseString); + if (!newBase.isAbsolute()) { + throw new PrologueException("Base must be ab absolute IRI: " + baseString); + } + baseURI = newBase; + } + + public String resolvePName(String prefixedName) throws PrologueException { + // from the parser we know that prefixedName is of the form: + // prefix:something + // remember that the prefixes are stored with the colon symbol + // This does not return the surrounding <> + + int 
idx = prefixedName.indexOf(":") + 1; + String prefix = prefixedName.substring(0, idx); + String sufix = prefixedName.substring(idx); + + if (prefixes.containsKey(prefix)) { + // if the last character of the fullUri is '#', the resolve method of + // java.net.URI does not work well + String fullUri = prefixes.get(prefix).toString(); + if (fullUri.charAt(fullUri.length() - 1) == '#') + return fullUri + sufix; + // if it is different, then it works + return prefixes.get(prefix).resolve(sufix).toString(); + } + throw new PrologueException("@prefix not found: " + prefixedName); + } + +} \ No newline at end of file diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java new file mode 100644 index 000000000..c69b5b157 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java @@ -0,0 +1,12 @@ +package org.semanticweb.vlog4j.parser.implementation; + +public class PrologueException extends Exception { + /** + * + */ + private static final long serialVersionUID = 1L; + + public PrologueException(String errorMessage) { + super(errorMessage); + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java new file mode 100644 index 000000000..e9db151f9 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java @@ -0,0 +1,349 @@ +package org.semanticweb.vlog4j.parser.implementation; + +import java.util.List; +import java.util.ArrayList; + +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.parser.api.Prologue; + +import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; + +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; + +public class RuleParserBase { + protected Prologue localPrologue; + protected List listOfRules; + protected List listOfFacts; + protected List listOfQueries; + + public RuleParserBase() { + localPrologue = LocalPrologue.getPrologue(); + listOfRules = new ArrayList(); + listOfFacts = new ArrayList(); + listOfQueries = new ArrayList(); + } + + public RuleParserBase(Prologue prologue, List listOfRules, List listOfFacts, + List listOfQueries) { + this.localPrologue = prologue; + this.listOfRules = listOfRules; + this.listOfFacts = listOfFacts; + this.listOfQueries = listOfQueries; + } + + protected Constant createLiteralInteger(String lexicalForm) { + // this method should be eliminated + return makeConstant(lexicalForm); + } + + protected Constant createLiteralDouble(String lexicalForm) { + // this method should be eliminated + return makeConstant(lexicalForm); + } + + protected Constant createLiteralDecimal(String lexicalForm) { + // this method should be eliminated + return makeConstant(lexicalForm); + } + + protected static String unescapeStr(String s) throws ParseException { + return unescape(s, '\\', false, 1, 1); + } + + protected static String unescapeStr(String s, int line, int column) throws ParseException { + return unescape(s, '\\', false, line, column); + } + + protected static String unescape(String s, char escape, boolean pointCodeOnly, int line, int column) + throws 
ParseException { + int i = s.indexOf(escape); + + if (i == -1) + return s; + + // Dump the initial part straight into the string buffer + StringBuilder sb = new StringBuilder(s.substring(0, i)); + + for (; i < s.length(); i++) { + char ch = s.charAt(i); + // Keep line and column numbers. + switch (ch) { + case '\n': + case '\r': + line++; + column = 1; + break; + default: + column++; + break; + } + + if (ch != escape) { + sb.append(ch); + continue; + } + + // Escape + if (i >= s.length() - 1) + throw new ParseException("Illegal escape at end of string, line:" + line + ", column: " + column); + char ch2 = s.charAt(i + 1); + column = column + 1; + i = i + 1; + + // \\u and \\U + if (ch2 == 'u') { + // i points to the \ so i+6 is next character + if (i + 4 >= s.length()) + throw new ParseException("\\u escape too short, line:" + line + ", column: " + column); + int x = hex(s, i + 1, 4, line, column); + sb.append((char) x); + // Jump 1 2 3 4 -- already skipped \ and u + i = i + 4; + column = column + 4; + continue; + } + if (ch2 == 'U') { + // i points to the \ so i+6 is next character + if (i + 8 >= s.length()) + throw new ParseException("\\U escape too short, line:" + line + ", column: " + column); + int x = hex(s, i + 1, 8, line, column); + // Convert to UTF-16 codepoint pair. + sb.append((char) x); + // Jump 1 2 3 4 5 6 7 8 -- already skipped \ and u + i = i + 8; + column = column + 8; + continue; + } + + // Are we doing just point code escapes? + // If so, \X-anything else is legal as a literal "\" and "X" + + if (pointCodeOnly) { + sb.append('\\'); + sb.append(ch2); + i = i + 1; + continue; + } + + // Not just codepoints. Must be a legal escape. + char ch3 = 0; + switch (ch2) { + case 'n': + ch3 = '\n'; + break; + case 't': + ch3 = '\t'; + break; + case 'r': + ch3 = '\r'; + break; + case 'b': + ch3 = '\b'; + break; + case 'f': + ch3 = '\f'; + break; + case '\'': + ch3 = '\''; + break; + case '\"': + ch3 = '\"'; + break; + case '\\': + ch3 = '\\'; + break; + default: + throw new ParseException("Unknown escape: \\" + ch2 + ", line:" + line + ", column: " + column); + } + sb.append(ch3); + } + return sb.toString(); + } + + // Line and column that started the escape + protected static int hex(String s, int i, int len, int line, int column) throws ParseException { +// if ( i+len >= s.length() ) +// { +// +// } + int x = 0; + for (int j = i; j < i + len; j++) { + char ch = s.charAt(j); + column++; + int k = 0; + switch (ch) { + case '0': + k = 0; + break; + case '1': + k = 1; + break; + case '2': + k = 2; + break; + case '3': + k = 3; + break; + case '4': + k = 4; + break; + case '5': + k = 5; + break; + case '6': + k = 6; + break; + case '7': + k = 7; + break; + case '8': + k = 8; + break; + case '9': + k = 9; + break; + case 'A': + case 'a': + k = 10; + break; + case 'B': + case 'b': + k = 11; + break; + case 'C': + case 'c': + k = 12; + break; + case 'D': + case 'd': + k = 13; + break; + case 'E': + case 'e': + k = 14; + break; + case 'F': + case 'f': + k = 15; + break; + default: + throw new ParseException("Illegal hex escape: " + ch + ", line:" + line + ", column: " + column); + } + x = (x << 4) + k; + } + return x; + } + + /** Remove first and last characters (e.g. ' or "") from a string */ + protected static String stripQuotes(String s) { + return s.substring(1, s.length() - 1); + } + + /** Remove first 3 and last 3 characters (e.g. 
''' or """) from a string */ + protected static String stripQuotes3(String s) { + return s.substring(3, s.length() - 3); + } + + /** Remove the first n characters from the string */ + protected static String stripChars(String s, int n) { + return s.substring(n, s.length()); + } + + protected String strRDFLiteral(String data, String lang, String dt) { + // https://www.w3.org/TR/turtle/#grammar-production-String RDFLiteral + String ret = "\"" + data + "\""; + if (dt != null) { + return ret += "^^" + dt; + // return ret += "^^<" + dt+">"; + } + if (lang != null) { + // dt = "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString" + return ret += "@" + lang; + } + // return ret + "^^http://www.w3.org/2001/XMLSchema#string"; + return ret + "^^"; + } + + protected static String unescapePName(String s, int line, int column) throws ParseException { + char escape = '\\'; + int idx = s.indexOf(escape); + + if (idx == -1) + return s; + + int len = s.length(); + StringBuilder sb = new StringBuilder(); + + for (int i = 0; i < len; i++) { + // Copied from unescape above - share! + char ch = s.charAt(i); + // Keep line and column numbers. + switch (ch) { + case '\n': + case '\r': + line++; + column = 1; + break; + default: + column++; + break; + } + + if (ch != escape) { + sb.append(ch); + continue; + } + + // Escape + if (i >= s.length() - 1) + throw new ParseException("Illegal escape at end of string, line:" + line + ", column: " + column); + char ch2 = s.charAt(i + 1); + column = column + 1; + i = i + 1; + + switch (ch2) { + case '~': + case '.': + case '-': + case '!': + case '$': + case '&': + case '\'': + case '(': + case ')': + case '*': + case '+': + case ',': + case ';': + case '=': + case ':': + case '/': + case '?': + case '#': + case '@': + case '%': + sb.append(ch2); + break; + default: + throw new ParseException("Illegal prefix name escape: " + ch2 + ", line:" + line + ", column: " + column); + } + } + return sb.toString(); + } + + public List<Rule> getRules() { + return listOfRules; + } + + public List<PositiveLiteral> getFacts() { + return listOfFacts; + } + + public List<PositiveLiteral> getQueries() { + return listOfQueries; + } + + +} \ No newline at end of file diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore new file mode 100644 index 000000000..61eb9ad4c --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore @@ -0,0 +1,5 @@ +/JavaCCRuleParserConstants.java +/ParseException.java +/SimpleCharStream.java +/Token.java +/TokenMgrError.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java new file mode 100644 index 000000000..662a25638 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java @@ -0,0 +1,989 @@ +/* Generated By:JavaCC: Do not edit this line.
JavaCCRuleParser.java */ +package org.semanticweb.vlog4j.parser.implementation.javacc; + +import java.util.List; +import java.util.ArrayList; + +import org.semanticweb.vlog4j.parser.implementation.RuleParserBase; +import org.semanticweb.vlog4j.parser.implementation.PrologueException; + +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.Constant; + +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeNegativeLiteral; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveConjunction; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConjunction; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; + + +public class JavaCCRuleParser extends RuleParserBase implements JavaCCRuleParserConstants { + + final public void parse() throws ParseException, PrologueException { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case BASE: + base(); + break; + default: + jj_la1[0] = jj_gen; + ; + } + label_1: + while (true) { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case PREFIX: + ; + break; + default: + jj_la1[1] = jj_gen; + break label_1; + } + prefix(); + } + label_2: + while (true) { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case IRI: + case PNAME_NS: + case PNAME_LN: + case VARORPREDNAME: + ; + break; + default: + jj_la1[2] = jj_gen; + break label_2; + } + statement(); + } + jj_consume_token(0); + } + + final public void base() throws ParseException, PrologueException { + String iriString; + jj_consume_token(BASE); + iriString = IRIREF(); + jj_consume_token(DOT); + localPrologue.setBase(iriString); + } + + final public void prefix() throws ParseException, PrologueException { + Token t; + String iriString; + jj_consume_token(PREFIX); + t = jj_consume_token(PNAME_NS); + iriString = IRIREF(); + jj_consume_token(DOT); + //note that prefix includes the colon (:) + localPrologue.setPrefix(t.image, iriString); + } + + final public void statement() throws ParseException, PrologueException { + Rule r; + PositiveLiteral l; + if (jj_2_1(2147483647)) { + r = rule(); + listOfRules.add(r); + } else { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case IRI: + case PNAME_NS: + case PNAME_LN: + case VARORPREDNAME: + l = positiveLiteral(); + jj_consume_token(DOT); + if (l.getVariables().isEmpty()) + listOfFacts.add(l); + else + listOfQueries.add(l); + break; + default: + jj_la1[3] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } + } + + final public Rule rule() throws ParseException, PrologueException { + List < PositiveLiteral > head; + List < Literal > body; + head = listOfPositiveLiterals(); + jj_consume_token(ARROW); + body = listOfLiterals(); + jj_consume_token(DOT); + {if (true) return makeRule(makePositiveConjunction(head), makeConjunction(body));} + throw new Error("Missing return statement in function"); + } + + final public List < PositiveLiteral > listOfPositiveLiterals() throws ParseException, PrologueException { + 
PositiveLiteral l; + List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); + l = positiveLiteral(); + list.add(l); + label_3: + while (true) { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case COMMA: + ; + break; + default: + jj_la1[4] = jj_gen; + break label_3; + } + jj_consume_token(COMMA); + l = positiveLiteral(); + list.add(l); + } + {if (true) return list;} + throw new Error("Missing return statement in function"); + } + + final public List < Literal > listOfLiterals() throws ParseException, PrologueException { + Literal l; + List < Literal > list = new ArrayList < Literal > (); + l = literal(); + list.add(l); + label_4: + while (true) { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case COMMA: + ; + break; + default: + jj_la1[5] = jj_gen; + break label_4; + } + jj_consume_token(COMMA); + l = literal(); + list.add(l); + } + {if (true) return list;} + throw new Error("Missing return statement in function"); + } + + final public Literal literal() throws ParseException, PrologueException { + Literal l = null; + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case IRI: + case PNAME_NS: + case PNAME_LN: + case VARORPREDNAME: + l = positiveLiteral(); + {if (true) return l;} + break; + case TILDE: + l = negativeLiteral(); + {if (true) return l;} + break; + default: + jj_la1[6] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + throw new Error("Missing return statement in function"); + } + + final public PositiveLiteral positiveLiteral() throws ParseException, PrologueException { + Token t; + List < Term > terms; + String predicateName; + predicateName = predicateName(); + jj_consume_token(LPAREN); + terms = listOfTerms(); + jj_consume_token(RPAREN); + {if (true) return makePositiveLiteral(predicateName, terms);} + throw new Error("Missing return statement in function"); + } + + final public NegativeLiteral negativeLiteral() throws ParseException, PrologueException { + List < Term > terms; + String predicateName; + jj_consume_token(TILDE); + predicateName = predicateName(); + jj_consume_token(LPAREN); + terms = listOfTerms(); + jj_consume_token(RPAREN); + {if (true) return makeNegativeLiteral(predicateName, terms);} + throw new Error("Missing return statement in function"); + } + + final public List < Term > listOfTerms() throws ParseException, PrologueException { + Term t; + List < Term > list = new ArrayList < Term > (); + t = term(); + list.add(t); + label_5: + while (true) { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case COMMA: + ; + break; + default: + jj_la1[7] = jj_gen; + break label_5; + } + jj_consume_token(COMMA); + t = term(); + list.add(t); + } + {if (true) return list;} + throw new Error("Missing return statement in function"); + } + + final public String predicateName() throws ParseException, PrologueException { + String s; + Token t; + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case IRI: + case PNAME_NS: + case PNAME_LN: + s = IRI(); + {if (true) return s;} + break; + case VARORPREDNAME: + t = jj_consume_token(VARORPREDNAME); + {if (true) return t.image;} + break; + default: + jj_la1[8] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + throw new Error("Missing return statement in function"); + } + + final public Term term() throws ParseException, PrologueException { + String s; + Token t; + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case IRI: + case PNAME_NS: + case PNAME_LN: + s = IRI(); + {if (true) return makeConstant(s);} + break; + case STRING_LITERAL1: + case STRING_LITERAL2: + case STRING_LITERAL_LONG1: + case 
STRING_LITERAL_LONG2: + s = RDFLiteral(); + {if (true) return makeConstant(s);} + break; + case VAR: + t = jj_consume_token(VAR); + {if (true) return makeVariable(t.image.substring(1));} + break; + default: + jj_la1[9] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + throw new Error("Missing return statement in function"); + } + +/** [16] */ + final public Constant NumericLiteral() throws ParseException { + Token t; + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case INTEGER: + t = jj_consume_token(INTEGER); + {if (true) return createLiteralInteger(t.image);} + break; + case DECIMAL: + t = jj_consume_token(DECIMAL); + {if (true) return createLiteralDecimal(t.image);} + break; + case DOUBLE: + t = jj_consume_token(DOUBLE); + {if (true) return createLiteralDouble(t.image);} + break; + default: + jj_la1[10] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + throw new Error("Missing return statement in function"); + } + + final public String RDFLiteral() throws ParseException, PrologueException { + Token t; + String lex = null; + String lang = null; // Optional lang tag and datatype. + String dt = null; + lex = String(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case LANGTAG: + case DATATYPE: + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case LANGTAG: + lang = Langtag(); + break; + case DATATYPE: + jj_consume_token(DATATYPE); + dt = IRI(); + break; + default: + jj_la1[11] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + break; + default: + jj_la1[12] = jj_gen; + ; + } + {if (true) return strRDFLiteral(lex, lang, dt);} + throw new Error("Missing return statement in function"); + } + + final public String Langtag() throws ParseException { + Token t; + t = jj_consume_token(LANGTAG); + String lang = stripChars(t.image, 1); + {if (true) return lang;} + throw new Error("Missing return statement in function"); + } + + final public String BooleanLiteral() throws ParseException { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case TRUE: + jj_consume_token(TRUE); + {if (true) return "true^^http://www.w3.org/2001/XMLSchema#boolean";} + break; + case FALSE: + jj_consume_token(FALSE); + {if (true) return "false^^http://www.w3.org/2001/XMLSchema#boolean";} + break; + default: + jj_la1[13] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + throw new Error("Missing return statement in function"); + } + + final public String String() throws ParseException { + Token t; + String lex; + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case STRING_LITERAL1: + t = jj_consume_token(STRING_LITERAL1); + lex = stripQuotes(t.image); + break; + case STRING_LITERAL2: + t = jj_consume_token(STRING_LITERAL2); + lex = stripQuotes(t.image); + break; + case STRING_LITERAL_LONG1: + t = jj_consume_token(STRING_LITERAL_LONG1); + lex = stripQuotes3(t.image); + break; + case STRING_LITERAL_LONG2: + t = jj_consume_token(STRING_LITERAL_LONG2); + lex = stripQuotes3(t.image); + break; + default: + jj_la1[14] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + lex = unescapeStr(lex, t.beginLine, t.beginColumn); + {if (true) return lex;} + throw new Error("Missing return statement in function"); + } + + final public String IRI() throws ParseException, PrologueException { + String iri; + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case IRI: + iri = IRIREF(); + break; + case PNAME_NS: + case PNAME_LN: + iri = PrefixedName(); + break; + default: + jj_la1[15] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + {if (true) return "<"+iri+">";} 
+ throw new Error("Missing return statement in function"); + } + + final public String PrefixedName() throws ParseException, PrologueException { + Token t; + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case PNAME_LN: + t = jj_consume_token(PNAME_LN); + break; + case PNAME_NS: + t = jj_consume_token(PNAME_NS); + break; + default: + jj_la1[16] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + {if (true) return localPrologue.resolvePName(t.image);} + throw new Error("Missing return statement in function"); + } + + final public String IRIREF() throws ParseException { + Token t; + t = jj_consume_token(IRI); + // we remove '<' and '>' + {if (true) return t.image.substring(1,t.image.length()-1);} + throw new Error("Missing return statement in function"); + } + + private boolean jj_2_1(int xla) { + jj_la = xla; jj_lastpos = jj_scanpos = token; + try { return !jj_3_1(); } + catch(LookaheadSuccess ls) { return true; } + finally { jj_save(0, xla); } + } + + private boolean jj_3R_16() { + if (jj_3R_21()) return true; + return false; + } + + private boolean jj_3R_11() { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_15()) { + jj_scanpos = xsp; + if (jj_3R_16()) return true; + } + return false; + } + + private boolean jj_3R_15() { + if (jj_3R_9()) return true; + return false; + } + + private boolean jj_3R_37() { + if (jj_3R_39()) return true; + return false; + } + + private boolean jj_3R_32() { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_37()) { + jj_scanpos = xsp; + if (jj_3R_38()) return true; + } + return false; + } + + private boolean jj_3R_30() { + Token xsp; + xsp = jj_scanpos; + if (jj_scan_token(26)) { + jj_scanpos = xsp; + if (jj_scan_token(25)) return true; + } + return false; + } + + private boolean jj_3R_18() { + if (jj_scan_token(VARORPREDNAME)) return true; + return false; + } + + private boolean jj_3R_13() { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_17()) { + jj_scanpos = xsp; + if (jj_3R_18()) return true; + } + return false; + } + + private boolean jj_3R_17() { + if (jj_3R_22()) return true; + return false; + } + + private boolean jj_3R_12() { + if (jj_scan_token(COMMA)) return true; + if (jj_3R_11()) return true; + return false; + } + + private boolean jj_3R_39() { + if (jj_scan_token(LANGTAG)) return true; + return false; + } + + private boolean jj_3R_27() { + if (jj_3R_30()) return true; + return false; + } + + private boolean jj_3R_26() { + if (jj_3R_29()) return true; + return false; + } + + private boolean jj_3R_8() { + if (jj_3R_11()) return true; + Token xsp; + while (true) { + xsp = jj_scanpos; + if (jj_3R_12()) { jj_scanpos = xsp; break; } + } + return false; + } + + private boolean jj_3R_20() { + if (jj_scan_token(COMMA)) return true; + if (jj_3R_19()) return true; + return false; + } + + private boolean jj_3R_22() { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_26()) { + jj_scanpos = xsp; + if (jj_3R_27()) return true; + } + return false; + } + + private boolean jj_3R_14() { + if (jj_3R_19()) return true; + Token xsp; + while (true) { + xsp = jj_scanpos; + if (jj_3R_20()) { jj_scanpos = xsp; break; } + } + return false; + } + + private boolean jj_3R_28() { + if (jj_3R_31()) return true; + Token xsp; + xsp = jj_scanpos; + if (jj_3R_32()) jj_scanpos = xsp; + return false; + } + + private boolean jj_3R_10() { + if (jj_scan_token(COMMA)) return true; + if (jj_3R_9()) return true; + return false; + } + + private boolean jj_3R_7() { + if (jj_3R_9()) return true; + Token xsp; + while (true) { + xsp = jj_scanpos; + if (jj_3R_10()) { jj_scanpos = xsp; break; } + } 
+ return false; + } + + private boolean jj_3R_36() { + if (jj_scan_token(STRING_LITERAL_LONG2)) return true; + return false; + } + + private boolean jj_3R_35() { + if (jj_scan_token(STRING_LITERAL_LONG1)) return true; + return false; + } + + private boolean jj_3R_34() { + if (jj_scan_token(STRING_LITERAL2)) return true; + return false; + } + + private boolean jj_3R_33() { + if (jj_scan_token(STRING_LITERAL1)) return true; + return false; + } + + private boolean jj_3R_21() { + if (jj_scan_token(TILDE)) return true; + if (jj_3R_13()) return true; + if (jj_scan_token(LPAREN)) return true; + if (jj_3R_14()) return true; + if (jj_scan_token(RPAREN)) return true; + return false; + } + + private boolean jj_3R_38() { + if (jj_scan_token(DATATYPE)) return true; + if (jj_3R_22()) return true; + return false; + } + + private boolean jj_3R_6() { + if (jj_3R_7()) return true; + if (jj_scan_token(ARROW)) return true; + if (jj_3R_8()) return true; + if (jj_scan_token(DOT)) return true; + return false; + } + + private boolean jj_3R_31() { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_33()) { + jj_scanpos = xsp; + if (jj_3R_34()) { + jj_scanpos = xsp; + if (jj_3R_35()) { + jj_scanpos = xsp; + if (jj_3R_36()) return true; + } + } + } + return false; + } + + private boolean jj_3R_9() { + if (jj_3R_13()) return true; + if (jj_scan_token(LPAREN)) return true; + if (jj_3R_14()) return true; + if (jj_scan_token(RPAREN)) return true; + return false; + } + + private boolean jj_3R_29() { + if (jj_scan_token(IRI)) return true; + return false; + } + + private boolean jj_3_1() { + if (jj_3R_6()) return true; + return false; + } + + private boolean jj_3R_25() { + if (jj_scan_token(VAR)) return true; + return false; + } + + private boolean jj_3R_24() { + if (jj_3R_28()) return true; + return false; + } + + private boolean jj_3R_23() { + if (jj_3R_22()) return true; + return false; + } + + private boolean jj_3R_19() { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_23()) { + jj_scanpos = xsp; + if (jj_3R_24()) { + jj_scanpos = xsp; + if (jj_3R_25()) return true; + } + } + return false; + } + + /** Generated Token Manager. */ + public JavaCCRuleParserTokenManager token_source; + SimpleCharStream jj_input_stream; + /** Current token. */ + public Token token; + /** Next token. */ + public Token jj_nt; + private int jj_ntk; + private Token jj_scanpos, jj_lastpos; + private int jj_la; + private int jj_gen; + final private int[] jj_la1 = new int[17]; + static private int[] jj_la1_0; + static private int[] jj_la1_1; + static { + jj_la1_init_0(); + jj_la1_init_1(); + } + private static void jj_la1_init_0() { + jj_la1_0 = new int[] {0x200,0x100,0x7000000,0x7000000,0x0,0x0,0x7000000,0x0,0x7000000,0x17780000,0x7000,0x20000000,0x20000000,0xc00,0x780000,0x7000000,0x6000000,}; + } + private static void jj_la1_init_1() { + jj_la1_1 = new int[] {0x0,0x0,0x10000000,0x10000000,0x200,0x200,0x10008000,0x200,0x10000000,0x0,0x0,0x200000,0x200000,0x0,0x0,0x0,0x0,}; + } + final private JJCalls[] jj_2_rtns = new JJCalls[1]; + private boolean jj_rescan = false; + private int jj_gc = 0; + + /** Constructor with InputStream. 
*/ + public JavaCCRuleParser(java.io.InputStream stream) { + this(stream, null); + } + /** Constructor with InputStream and supplied encoding */ + public JavaCCRuleParser(java.io.InputStream stream, String encoding) { + try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } + token_source = new JavaCCRuleParserTokenManager(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 17; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + } + + /** Reinitialise. */ + public void ReInit(java.io.InputStream stream) { + ReInit(stream, null); + } + /** Reinitialise. */ + public void ReInit(java.io.InputStream stream, String encoding) { + try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } + token_source.ReInit(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 17; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + } + + /** Constructor. */ + public JavaCCRuleParser(java.io.Reader stream) { + jj_input_stream = new SimpleCharStream(stream, 1, 1); + token_source = new JavaCCRuleParserTokenManager(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 17; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + } + + /** Reinitialise. */ + public void ReInit(java.io.Reader stream) { + jj_input_stream.ReInit(stream, 1, 1); + token_source.ReInit(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 17; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + } + + /** Constructor with generated Token Manager. */ + public JavaCCRuleParser(JavaCCRuleParserTokenManager tm) { + token_source = tm; + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 17; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + } + + /** Reinitialise. 
*/ + public void ReInit(JavaCCRuleParserTokenManager tm) { + token_source = tm; + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 17; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + } + + private Token jj_consume_token(int kind) throws ParseException { + Token oldToken; + if ((oldToken = token).next != null) token = token.next; + else token = token.next = token_source.getNextToken(); + jj_ntk = -1; + if (token.kind == kind) { + jj_gen++; + if (++jj_gc > 100) { + jj_gc = 0; + for (int i = 0; i < jj_2_rtns.length; i++) { + JJCalls c = jj_2_rtns[i]; + while (c != null) { + if (c.gen < jj_gen) c.first = null; + c = c.next; + } + } + } + return token; + } + token = oldToken; + jj_kind = kind; + throw generateParseException(); + } + + static private final class LookaheadSuccess extends java.lang.Error { } + final private LookaheadSuccess jj_ls = new LookaheadSuccess(); + private boolean jj_scan_token(int kind) { + if (jj_scanpos == jj_lastpos) { + jj_la--; + if (jj_scanpos.next == null) { + jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken(); + } else { + jj_lastpos = jj_scanpos = jj_scanpos.next; + } + } else { + jj_scanpos = jj_scanpos.next; + } + if (jj_rescan) { + int i = 0; Token tok = token; + while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; } + if (tok != null) jj_add_error_token(kind, i); + } + if (jj_scanpos.kind != kind) return true; + if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls; + return false; + } + + +/** Get the next Token. */ + final public Token getNextToken() { + if (token.next != null) token = token.next; + else token = token.next = token_source.getNextToken(); + jj_ntk = -1; + jj_gen++; + return token; + } + +/** Get the specific Token. */ + final public Token getToken(int index) { + Token t = token; + for (int i = 0; i < index; i++) { + if (t.next != null) t = t.next; + else t = t.next = token_source.getNextToken(); + } + return t; + } + + private int jj_ntk() { + if ((jj_nt=token.next) == null) + return (jj_ntk = (token.next=token_source.getNextToken()).kind); + else + return (jj_ntk = jj_nt.kind); + } + + private java.util.List jj_expentries = new java.util.ArrayList(); + private int[] jj_expentry; + private int jj_kind = -1; + private int[] jj_lasttokens = new int[100]; + private int jj_endpos; + + private void jj_add_error_token(int kind, int pos) { + if (pos >= 100) return; + if (pos == jj_endpos + 1) { + jj_lasttokens[jj_endpos++] = kind; + } else if (jj_endpos != 0) { + jj_expentry = new int[jj_endpos]; + for (int i = 0; i < jj_endpos; i++) { + jj_expentry[i] = jj_lasttokens[i]; + } + jj_entries_loop: for (java.util.Iterator it = jj_expentries.iterator(); it.hasNext();) { + int[] oldentry = (int[])(it.next()); + if (oldentry.length == jj_expentry.length) { + for (int i = 0; i < jj_expentry.length; i++) { + if (oldentry[i] != jj_expentry[i]) { + continue jj_entries_loop; + } + } + jj_expentries.add(jj_expentry); + break jj_entries_loop; + } + } + if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind; + } + } + + /** Generate ParseException. 
*/ + public ParseException generateParseException() { + jj_expentries.clear(); + boolean[] la1tokens = new boolean[63]; + if (jj_kind >= 0) { + la1tokens[jj_kind] = true; + jj_kind = -1; + } + for (int i = 0; i < 17; i++) { + if (jj_la1[i] == jj_gen) { + for (int j = 0; j < 32; j++) { + if ((jj_la1_0[i] & (1< jj_gen) { + jj_la = p.arg; jj_lastpos = jj_scanpos = p.first; + switch (i) { + case 0: jj_3_1(); break; + } + } + p = p.next; + } while (p != null); + } catch(LookaheadSuccess ls) { } + } + jj_rescan = false; + } + + private void jj_save(int index, int xla) { + JJCalls p = jj_2_rtns[index]; + while (p.gen > jj_gen) { + if (p.next == null) { p = p.next = new JJCalls(); break; } + p = p.next; + } + p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla; + } + + static final class JJCalls { + int gen; + Token first; + int arg; + JJCalls next; + } + +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj new file mode 100644 index 000000000..005f25bd4 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -0,0 +1,562 @@ +options +{ + // Use \ u escapes in streams AND use a reader for the query + // => get both raw and escaped unicode + //JAVA_UNICODE_ESCAPE = true; + //UNICODE_INPUT = false; + UNICODE_INPUT = true; + STATIC = false; + //DEBUG_PARSER = true; + //DEBUG_TOKEN_MANAGER = true ; +} + +PARSER_BEGIN(JavaCCRuleParser) +package org.semanticweb.vlog4j.parser.implementation.javacc; + +import java.util.List; +import java.util.ArrayList; + +import org.semanticweb.vlog4j.parser.implementation.RuleParserBase; +import org.semanticweb.vlog4j.parser.implementation.PrologueException; + +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.Constant; + +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeNegativeLiteral; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveConjunction; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConjunction; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; + + +public class JavaCCRuleParser extends RuleParserBase +{ +} + +PARSER_END(JavaCCRuleParser) + + +void parse() throws PrologueException: +{ +} +{ + ( base() )? 
+ ( prefix() )* + ( statement() )* + < EOF > +} + +void base() throws PrologueException: +{ + String iriString; +} +{ + < BASE > iriString = IRIREF() < DOT > + { + localPrologue.setBase(iriString); + } +} + +void prefix() throws PrologueException: +{ + Token t; + String iriString; +} +{ + < PREFIX > t = < PNAME_NS > iriString = IRIREF() < DOT > + { + //note that prefix includes the colon (:) + localPrologue.setPrefix(t.image, iriString); + } +} + +void statement() throws PrologueException: +{ + Rule r; + PositiveLiteral l; +} +{ + LOOKAHEAD(rule()) r = rule() { listOfRules.add(r);} +| l = positiveLiteral() < DOT > + { + if (l.getVariables().isEmpty()) + listOfFacts.add(l); + else + listOfQueries.add(l); + } +} + +Rule rule() throws PrologueException: +{ + List < PositiveLiteral > head; + List < Literal > body; +} +{ + head = listOfPositiveLiterals() < ARROW > body = listOfLiterals() < DOT > + { return makeRule(makePositiveConjunction(head), makeConjunction(body)); } +} + +List < PositiveLiteral > listOfPositiveLiterals() throws PrologueException: +{ + PositiveLiteral l; + List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); +} +{ + l = positiveLiteral() { list.add(l); } + ( < COMMA > l = positiveLiteral() { list.add(l); } )* + { return list; } +} + +List < Literal > listOfLiterals() throws PrologueException: +{ + Literal l; + List < Literal > list = new ArrayList < Literal > (); +} +{ + l = literal() { list.add(l); } + ( < COMMA > l = literal() { list.add(l); } )* + { return list; } +} + +Literal literal() throws PrologueException: +{ + Literal l = null; +} +{ + l = positiveLiteral() { return l; } +| l = negativeLiteral() { return l; } +} + +PositiveLiteral positiveLiteral() throws PrologueException: +{ + Token t; + List < Term > terms; + String predicateName; +} +{ + predicateName = predicateName() < LPAREN > terms = listOfTerms() < RPAREN > + { return makePositiveLiteral(predicateName, terms); } +} + +NegativeLiteral negativeLiteral() throws PrologueException: +{ + List < Term > terms; + String predicateName; +} +{ + < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms() < RPAREN > + { return makeNegativeLiteral(predicateName, terms); } +} + +List < Term > listOfTerms() throws PrologueException: +{ + Term t; + List < Term > list = new ArrayList < Term > (); +} +{ + t = term() { list.add(t); } + ( < COMMA > t = term() { list.add(t); } )* + { return list; } +} + +String predicateName() throws PrologueException: +{ + String s; + Token t; +} +{ + s = IRI() { return s; } +| t = < VARORPREDNAME > { return t.image; } +} + +Term term() throws PrologueException: +{ + String s; + Token t; +} +{ + s = IRI() { return makeConstant(s); } +| s = RDFLiteral() { return makeConstant(s); } +| t = < VAR > { return makeVariable(t.image.substring(1)); } +} + +/** [16] */ +Constant NumericLiteral() : +{ + Token t; +} +{ + t = < INTEGER > { return createLiteralInteger(t.image); } +| t = < DECIMAL > { return createLiteralDecimal(t.image); } +| t = < DOUBLE > { return createLiteralDouble(t.image); } +} + +String RDFLiteral() throws PrologueException: +{ + Token t; + String lex = null; + String lang = null; // Optional lang tag and datatype. + String dt = null; +} +{ + lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI() )? + { return strRDFLiteral(lex, lang, dt); } +} + +String Langtag() : +{ + Token t; +} +{ + // Enumerate the directives here because they look like language tags. 
+ ( + t = < LANGTAG > + ) + { + String lang = stripChars(t.image, 1); + return lang; + } +} + +String BooleanLiteral() : +{ +} +{ + < TRUE > { return "true^^http://www.w3.org/2001/XMLSchema#boolean"; } +| < FALSE > { return "false^^http://www.w3.org/2001/XMLSchema#boolean"; } +} + +String String(): +{ + Token t; + String lex; +} +{ + ( + t = < STRING_LITERAL1 > { lex = stripQuotes(t.image); } + | t = < STRING_LITERAL2 > { lex = stripQuotes(t.image); } + | t = < STRING_LITERAL_LONG1 > { lex = stripQuotes3(t.image); } + | t = < STRING_LITERAL_LONG2 > { lex = stripQuotes3(t.image); } + ) + { + lex = unescapeStr(lex, t.beginLine, t.beginColumn); + return lex; + } +} + +String IRI() throws PrologueException: +{ + String iri; +} +{ + ( + iri = IRIREF() + | iri = PrefixedName() + ) + { return "<"+iri+">"; } +} + +String PrefixedName() throws PrologueException: +{ + Token t; +} +{ + ( + t = < PNAME_LN > + | t = < PNAME_NS > + ) + { return localPrologue.resolvePName(t.image);} + //{ return localPrologue.resolvePName(t.image, t.beginLine, t.beginColumn);} +} + +String IRIREF() : +{ + Token t; +} +{ + t = < IRI > + { + // we remove '<' and '>' + return t.image.substring(1,t.image.length()-1); + } +} + +// ------------------------------------------ +// Tokens +// Comments and whitespace +SKIP : +{ + " " +| "\t" +| "\n" +| "\r" +| "\f" +} + +TOKEN : +{ + < #WS : + " " + | "\t" + | "\n" + | "\r" + | "\f" > +} + +SPECIAL_TOKEN : +{ + < SINGLE_LINE_COMMENT : + "#" (~[ "\n", "\r" ])* + ( + "\n" + | "\r" + | "\r\n" + )? > +} + +// ------------------------------------------------- +// Keywords : directives before LANGTAG +TOKEN : +{ + < PREFIX : "@prefix" > +| < BASE : "@base" > +} + +TOKEN [ IGNORE_CASE ] : +{ + < TRUE : "true" > +| < FALSE : "false" > + // ------------------------------------------------- +| < INTEGER : ([ "-", "+" ])? < DIGITS > > +| + < DECIMAL : + ([ "-", "+" ])? + ( + (< DIGITS >)+ "." (< DIGITS >)* + | "." (< DIGITS >)+ + ) + > + // Required exponent. +| < DOUBLE : + ([ "+", "-" ])? + ( + ([ "0"-"9" ])+ "." ([ "0"-"9" ])* < EXPONENT > + | "." ([ "0"-"9" ])+ (< EXPONENT >) + | ([ "0"-"9" ])+ < EXPONENT > + ) + > +| < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? ([ "0"-"9" ])+ > +| < #QUOTE_3D : "\"\"\"" > +| < #QUOTE_3S : "'''" > + // "u" done by javacc input stream. + // "U" escapes not supported yet for Java strings +| < ECHAR : + "\\" + ( + "t" + | "b" + | "n" + | "r" + | "f" + | "\\" + | "\"" + | "'" + ) > +| < STRING_LITERAL1 : + // Single quoted string + "'" + ( + (~[ "'", "\\", "\n", "\r" ]) + | < ECHAR > + )* + "'" > +| < STRING_LITERAL2 : + // Double quoted string + "\"" + ( + (~[ "\"", "\\", "\n", "\r" ]) + | < ECHAR > + )* + "\"" > +| < STRING_LITERAL_LONG1 : + < QUOTE_3S > + ( + ~[ "'", "\\" ] + | < ECHAR > + | ("'" ~[ "'" ]) + | ("''" ~[ "'" ]) + )* + < QUOTE_3S > > +| < STRING_LITERAL_LONG2 : + < QUOTE_3D > + ( + ~[ "\"", "\\" ] + | < ECHAR > + | ("\"" ~[ "\"" ]) + | ("\"\"" ~[ "\"" ]) + )* + < QUOTE_3D > > +| < DIGITS : ([ "0"-"9" ])+ > + // | +} + +TOKEN : +{ + // Includes # for relative URIs + < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > +| < PNAME_NS : (< PN_PREFIX >)? ":" > +| < PNAME_LN : < PNAME_NS > < PN_LOCAL > > +| < BLANK_NODE_LABEL : "_:" < PN_LOCAL > > +| < VAR : "?" 
< VARORPREDNAME > > +| < LANGTAG : + < AT > (< A2Z >)+ + ( + "-" (< A2ZN >)+ + )* > +| < #A2Z : [ "a"-"z", "A"-"Z" ] > +| < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > +} + +TOKEN : +{ + < LPAREN : "(" > +| < RPAREN : ")" > +| < NIL : + < LPAREN > + ( + < WS > + | < SINGLE_LINE_COMMENT > + )* + < RPAREN > > +| < LBRACE : "{" > +| < RBRACE : "}" > +| < LBRACKET : "[" > +| < RBRACKET : "]" > +| < ANON : + < LBRACKET > + ( + < WS > + | < SINGLE_LINE_COMMENT > + )* + < RBRACKET > > +| < SEMICOLON : ";" > +| < COMMA : "," > +| < DOT : "." > +} + +// Operator +TOKEN : +{ + < EQ : "=" > +| < ARROW : ":-" > +| < DOLLAR : "$" > +| < QMARK : "?" > +| < TILDE : "~" > +| < COLON : ":" > + // | < PLUS: "+" > + // | < MINUS: "-" > +| < STAR : "*" > +| < SLASH : "/" > +| < RSLASH : "\\" > +| < BOM : "\ufeff" > + //| < AMP: "&" > + //| < REM: "%" > +| < DATATYPE : "^^" > +| < AT : "@" > +} + +TOKEN : +{ + < #PN_CHARS_BASE : + [ "A"-"Z" ] + | [ "a"-"z" ] + | + [ "\u00c0"-"\u00d6" ] + | [ "\u00d8"-"\u00f6" ] + | [ "\u00f8"-"\u02ff" ] + | + [ "\u0370"-"\u037d" ] + | [ "\u037f"-"\u1fff" ] + | + [ "\u200c"-"\u200d" ] + | [ "\u2070"-"\u218f" ] + | [ "\u2c00"-"\u2fef" ] + | + [ "\u3001"-"\ud7ff" ] + | [ "\uf900"-"\ufffd" ] + > + // [#x10000-#xEFFFF] +| + < #PN_CHARS_U : + < PN_CHARS_BASE > + | "_" > +| + // No DOT + < #PN_CHARS : + ( + < PN_CHARS_U > + | "-" + | [ "0"-"9" ] + | "\u00b7" + | + [ "\u0300"-"\u036f" ] + | [ "\u203f"-"\u2040" ] + ) > +| + // No leading "_", no trailing ".", can have dot inside prefix name. + < #PN_PREFIX : + < PN_CHARS_BASE > + ( + ( + < PN_CHARS > + | "." + )* + < PN_CHARS > + )? > +| + // With a leading "_", no dot at end of local name. + < #PN_LOCAL : + ( + < PN_CHARS_U > + | [ "0"-"9" ] + ) + ( + ( + < PN_CHARS > + | "." + )* + < PN_CHARS > + )? > +| + // NCNAME without "-" and ".", allowing leading digits. + < VARORPREDNAME : + ( + < PN_CHARS_U > + | [ "0"-"9" ] + ) + ( + < PN_CHARS_U > + | [ "0"-"9" ] + | "\u00b7" + | + [ "\u0300"-"\u036f" ] + | [ "\u203f"-"\u2040" ] + )* > +} + +// Catch-all tokens. Must be last. +// Any non-whitespace. Causes a parser exception, rather than a +// token manager error (with hidden line numbers). +// Only bad IRIs (e.g. spaces) now give unhelpful parse errors. +TOKEN : +{ + < #UNKNOWN : (~[ " ", "\t", "\n", "\r", "\f" ])+ > +} + +SKIP :{< "%" (~["\n"])* "\n" >} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java new file mode 100644 index 000000000..215dac1f9 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java @@ -0,0 +1,1500 @@ +/* Generated By:JavaCC: Do not edit this line. 
JavaCCRuleParserTokenManager.java */ +package org.semanticweb.vlog4j.parser.implementation.javacc; +import java.util.List; +import java.util.ArrayList; +import org.semanticweb.vlog4j.parser.implementation.RuleParserBase; +import org.semanticweb.vlog4j.parser.implementation.PrologueException; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.Constant; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeNegativeLiteral; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveConjunction; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConjunction; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; + +/** Token Manager. */ +public class JavaCCRuleParserTokenManager implements JavaCCRuleParserConstants +{ + + /** Debug output. */ + public java.io.PrintStream debugStream = System.out; + /** Set debug output. */ + public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; } +private int jjStopAtPos(int pos, int kind) +{ + jjmatchedKind = kind; + jjmatchedPos = pos; + return pos + 1; +} +private int jjMoveStringLiteralDfa0_0() +{ + switch(curChar) + { + case 9: + jjmatchedKind = 2; + return jjMoveNfa_0(0, 0); + case 10: + jjmatchedKind = 3; + return jjMoveNfa_0(0, 0); + case 12: + jjmatchedKind = 5; + return jjMoveNfa_0(0, 0); + case 13: + jjmatchedKind = 4; + return jjMoveNfa_0(0, 0); + case 32: + jjmatchedKind = 1; + return jjMoveNfa_0(0, 0); + case 36: + jjmatchedKind = 45; + return jjMoveNfa_0(0, 0); + case 40: + jjmatchedKind = 32; + return jjMoveNfa_0(0, 0); + case 41: + jjmatchedKind = 33; + return jjMoveNfa_0(0, 0); + case 42: + jjmatchedKind = 49; + return jjMoveNfa_0(0, 0); + case 44: + jjmatchedKind = 41; + return jjMoveNfa_0(0, 0); + case 46: + jjmatchedKind = 42; + return jjMoveNfa_0(0, 0); + case 47: + jjmatchedKind = 50; + return jjMoveNfa_0(0, 0); + case 58: + jjmatchedKind = 48; + return jjMoveStringLiteralDfa1_0(0x100000000000L); + case 59: + jjmatchedKind = 40; + return jjMoveNfa_0(0, 0); + case 61: + jjmatchedKind = 43; + return jjMoveNfa_0(0, 0); + case 63: + jjmatchedKind = 46; + return jjMoveNfa_0(0, 0); + case 64: + jjmatchedKind = 54; + return jjMoveStringLiteralDfa1_0(0x300L); + case 70: + return jjMoveStringLiteralDfa1_0(0x800L); + case 84: + return jjMoveStringLiteralDfa1_0(0x400L); + case 91: + jjmatchedKind = 37; + return jjMoveNfa_0(0, 0); + case 92: + jjmatchedKind = 51; + return jjMoveNfa_0(0, 0); + case 93: + jjmatchedKind = 38; + return jjMoveNfa_0(0, 0); + case 94: + return jjMoveStringLiteralDfa1_0(0x20000000000000L); + case 102: + return jjMoveStringLiteralDfa1_0(0x800L); + case 116: + return jjMoveStringLiteralDfa1_0(0x400L); + case 123: + jjmatchedKind = 35; + return jjMoveNfa_0(0, 0); + case 125: + jjmatchedKind = 36; + return jjMoveNfa_0(0, 0); + case 126: + jjmatchedKind = 47; + return jjMoveNfa_0(0, 0); + case 65279: + jjmatchedKind = 52; + return jjMoveNfa_0(0, 0); + 
default : + return jjMoveNfa_0(0, 0); + } +} +private int jjMoveStringLiteralDfa1_0(long active0) +{ + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + return jjMoveNfa_0(0, 0); + } + switch(curChar) + { + case 45: + if ((active0 & 0x100000000000L) != 0L) + { + jjmatchedKind = 44; + jjmatchedPos = 1; + } + break; + case 65: + return jjMoveStringLiteralDfa2_0(active0, 0x800L); + case 82: + return jjMoveStringLiteralDfa2_0(active0, 0x400L); + case 94: + if ((active0 & 0x20000000000000L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 1; + } + break; + case 97: + return jjMoveStringLiteralDfa2_0(active0, 0x800L); + case 98: + return jjMoveStringLiteralDfa2_0(active0, 0x200L); + case 112: + return jjMoveStringLiteralDfa2_0(active0, 0x100L); + case 114: + return jjMoveStringLiteralDfa2_0(active0, 0x400L); + default : + break; + } + return jjMoveNfa_0(0, 1); +} +private int jjMoveStringLiteralDfa2_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjMoveNfa_0(0, 1); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + return jjMoveNfa_0(0, 1); + } + switch(curChar) + { + case 76: + return jjMoveStringLiteralDfa3_0(active0, 0x800L); + case 85: + return jjMoveStringLiteralDfa3_0(active0, 0x400L); + case 97: + return jjMoveStringLiteralDfa3_0(active0, 0x200L); + case 108: + return jjMoveStringLiteralDfa3_0(active0, 0x800L); + case 114: + return jjMoveStringLiteralDfa3_0(active0, 0x100L); + case 117: + return jjMoveStringLiteralDfa3_0(active0, 0x400L); + default : + break; + } + return jjMoveNfa_0(0, 2); +} +private int jjMoveStringLiteralDfa3_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjMoveNfa_0(0, 2); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + return jjMoveNfa_0(0, 2); + } + switch(curChar) + { + case 69: + if ((active0 & 0x400L) != 0L) + { + jjmatchedKind = 10; + jjmatchedPos = 3; + } + break; + case 83: + return jjMoveStringLiteralDfa4_0(active0, 0x800L); + case 101: + if ((active0 & 0x400L) != 0L) + { + jjmatchedKind = 10; + jjmatchedPos = 3; + } + return jjMoveStringLiteralDfa4_0(active0, 0x100L); + case 115: + return jjMoveStringLiteralDfa4_0(active0, 0xa00L); + default : + break; + } + return jjMoveNfa_0(0, 3); +} +private int jjMoveStringLiteralDfa4_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjMoveNfa_0(0, 3); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + return jjMoveNfa_0(0, 3); + } + switch(curChar) + { + case 69: + if ((active0 & 0x800L) != 0L) + { + jjmatchedKind = 11; + jjmatchedPos = 4; + } + break; + case 101: + if ((active0 & 0x200L) != 0L) + { + jjmatchedKind = 9; + jjmatchedPos = 4; + } + else if ((active0 & 0x800L) != 0L) + { + jjmatchedKind = 11; + jjmatchedPos = 4; + } + break; + case 102: + return jjMoveStringLiteralDfa5_0(active0, 0x100L); + default : + break; + } + return jjMoveNfa_0(0, 4); +} +private int jjMoveStringLiteralDfa5_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjMoveNfa_0(0, 4); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + return jjMoveNfa_0(0, 4); + } + switch(curChar) + { + case 105: + return jjMoveStringLiteralDfa6_0(active0, 0x100L); + default : + break; + } + return jjMoveNfa_0(0, 5); +} +private int jjMoveStringLiteralDfa6_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjMoveNfa_0(0, 5); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { 
+ return jjMoveNfa_0(0, 5); + } + switch(curChar) + { + case 120: + if ((active0 & 0x100L) != 0L) + { + jjmatchedKind = 8; + jjmatchedPos = 6; + } + break; + default : + break; + } + return jjMoveNfa_0(0, 6); +} +static final long[] jjbitVec0 = { + 0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL +}; +static final long[] jjbitVec2 = { + 0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL +}; +static final long[] jjbitVec3 = { + 0xfffe7000fffffff6L, 0xffffffffffffffffL, 0xffffffffffffffffL, 0x7e00000000ffffffL +}; +static final long[] jjbitVec4 = { + 0x0L, 0x0L, 0x0L, 0xff7fffffff7fffffL +}; +static final long[] jjbitVec5 = { + 0x0L, 0xbfff000000000000L, 0xffffffffffffffffL, 0xffffffffffffffffL +}; +static final long[] jjbitVec6 = { + 0x3000L, 0xffff000000000000L, 0xffffffffffffffffL, 0xffffffffffffffffL +}; +static final long[] jjbitVec7 = { + 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffL, 0x0L +}; +static final long[] jjbitVec8 = { + 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffL +}; +static final long[] jjbitVec9 = { + 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0x3fffffffffffffffL +}; +static final long[] jjbitVec10 = { + 0x0L, 0x0L, 0x80000000000000L, 0xff7fffffff7fffffL +}; +static final long[] jjbitVec11 = { + 0xffffffffffffffffL, 0xbfffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL +}; +static final long[] jjbitVec12 = { + 0x8000000000003000L, 0xffff000000000001L, 0xffffffffffffffffL, 0xffffffffffffffffL +}; +private int jjMoveNfa_0(int startState, int curPos) +{ + int strKind = jjmatchedKind; + int strPos = jjmatchedPos; + int seenUpto; + input_stream.backup(seenUpto = curPos + 1); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { throw new Error("Internal Error"); } + curPos = 0; + int startsAt = 0; + jjnewStateCnt = 114; + int i = 1; + jjstateSet[0] = startState; + int kind = 0x7fffffff; + for (;;) + { + if (++jjround == 0x7fffffff) + ReInitRounds(); + if (curChar < 64) + { + long l = 1L << curChar; + do + { + switch(jjstateSet[--i]) + { + case 0: + if ((0x3ff000000000000L & l) != 0L) + { + if (kind > 12) + kind = 12; + jjCheckNAddStates(0, 7); + } + else if ((0x280000000000L & l) != 0L) + jjCheckNAddStates(8, 12); + else if (curChar == 58) + { + if (kind > 25) + kind = 25; + jjCheckNAdd(110); + } + else if (curChar == 46) + jjCheckNAddTwoStates(83, 85); + else if (curChar == 37) + jjCheckNAddTwoStates(75, 76); + else if (curChar == 40) + jjCheckNAddStates(13, 15); + else if (curChar == 60) + jjCheckNAddTwoStates(42, 43); + else if (curChar == 34) + jjstateSet[jjnewStateCnt++] = 39; + else if (curChar == 39) + jjstateSet[jjnewStateCnt++] = 27; + else if (curChar == 35) + { + if (kind > 7) + kind = 7; + jjCheckNAddStates(16, 18); + } + else if (curChar == 63) + jjstateSet[jjnewStateCnt++] = 50; + if ((0x3ff000000000000L & l) != 0L) + { + if (kind > 60) + kind = 60; + jjCheckNAdd(73); + } + else if (curChar == 34) + jjCheckNAddStates(19, 21); + else if (curChar == 39) + jjCheckNAddStates(22, 24); + break; + case 1: + if ((0xffffffffffffdbffL & l) == 0L) + break; + if (kind > 7) + kind = 7; + jjCheckNAddStates(16, 18); + break; + case 2: + if ((0x2400L & l) != 0L && kind > 7) + kind = 7; + break; + case 3: + if (curChar == 10 && kind > 7) + kind = 7; + break; + case 4: + if (curChar == 13) + jjstateSet[jjnewStateCnt++] = 3; + break; + case 6: + if ((0x8400000000L & l) != 0L && kind > 18) + kind = 18; + break; + case 7: + if (curChar == 39) + 
jjCheckNAddStates(22, 24); + break; + case 8: + if ((0xffffff7fffffdbffL & l) != 0L) + jjCheckNAddStates(22, 24); + break; + case 10: + if ((0x8400000000L & l) != 0L) + jjCheckNAddStates(22, 24); + break; + case 11: + if (curChar == 39 && kind > 19) + kind = 19; + break; + case 12: + if (curChar == 34) + jjCheckNAddStates(19, 21); + break; + case 13: + if ((0xfffffffbffffdbffL & l) != 0L) + jjCheckNAddStates(19, 21); + break; + case 15: + if ((0x8400000000L & l) != 0L) + jjCheckNAddStates(19, 21); + break; + case 16: + if (curChar == 34 && kind > 20) + kind = 20; + break; + case 17: + if (curChar == 39) + jjCheckNAddStates(25, 28); + break; + case 18: + case 22: + if ((0xffffff7fffffffffL & l) != 0L) + jjCheckNAddStates(25, 28); + break; + case 20: + if ((0x8400000000L & l) != 0L) + jjCheckNAddStates(25, 28); + break; + case 21: + case 24: + if (curChar == 39) + jjCheckNAdd(22); + break; + case 23: + if (curChar == 39) + jjAddStates(29, 30); + break; + case 25: + if (curChar == 39 && kind > 21) + kind = 21; + break; + case 26: + if (curChar == 39) + jjstateSet[jjnewStateCnt++] = 25; + break; + case 27: + if (curChar == 39) + jjstateSet[jjnewStateCnt++] = 17; + break; + case 28: + if (curChar == 39) + jjstateSet[jjnewStateCnt++] = 27; + break; + case 29: + if (curChar == 34) + jjCheckNAddStates(31, 34); + break; + case 30: + case 34: + if ((0xfffffffbffffffffL & l) != 0L) + jjCheckNAddStates(31, 34); + break; + case 32: + if ((0x8400000000L & l) != 0L) + jjCheckNAddStates(31, 34); + break; + case 33: + case 36: + if (curChar == 34) + jjCheckNAdd(34); + break; + case 35: + if (curChar == 34) + jjAddStates(35, 36); + break; + case 37: + if (curChar == 34 && kind > 22) + kind = 22; + break; + case 38: + if (curChar == 34) + jjstateSet[jjnewStateCnt++] = 37; + break; + case 39: + if (curChar == 34) + jjstateSet[jjnewStateCnt++] = 29; + break; + case 40: + if (curChar == 34) + jjstateSet[jjnewStateCnt++] = 39; + break; + case 41: + if (curChar == 60) + jjCheckNAddTwoStates(42, 43); + break; + case 42: + if ((0xaffffffa00000000L & l) != 0L) + jjCheckNAddTwoStates(42, 43); + break; + case 43: + if (curChar == 62 && kind > 24) + kind = 24; + break; + case 44: + if (curChar == 58) + jjstateSet[jjnewStateCnt++] = 45; + break; + case 45: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 27) + kind = 27; + jjCheckNAddTwoStates(46, 47); + break; + case 46: + if ((0x3ff600000000000L & l) != 0L) + jjCheckNAddTwoStates(46, 47); + break; + case 47: + if ((0x3ff200000000000L & l) != 0L && kind > 27) + kind = 27; + break; + case 49: + if (curChar == 63) + jjstateSet[jjnewStateCnt++] = 50; + break; + case 50: + case 51: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 28) + kind = 28; + jjCheckNAdd(51); + break; + case 54: + if (curChar == 45) + jjCheckNAdd(55); + break; + case 55: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 29) + kind = 29; + jjCheckNAddTwoStates(54, 55); + break; + case 56: + if (curChar == 40) + jjCheckNAddStates(13, 15); + break; + case 57: + if (curChar == 35) + jjCheckNAddStates(37, 42); + break; + case 58: + if ((0xffffffffffffdbffL & l) != 0L) + jjCheckNAddStates(37, 42); + break; + case 59: + if ((0x2400L & l) != 0L) + jjCheckNAddStates(13, 15); + break; + case 60: + if ((0x100003600L & l) != 0L) + jjCheckNAddStates(13, 15); + break; + case 61: + if (curChar == 41 && kind > 34) + kind = 34; + break; + case 62: + if (curChar == 10) + jjCheckNAddStates(13, 15); + break; + case 63: + if (curChar == 13) + jjstateSet[jjnewStateCnt++] = 62; + break; + 
case 65: + if (curChar == 35) + jjCheckNAddStates(43, 48); + break; + case 66: + if ((0xffffffffffffdbffL & l) != 0L) + jjCheckNAddStates(43, 48); + break; + case 67: + if ((0x2400L & l) != 0L) + jjCheckNAddStates(49, 51); + break; + case 68: + if ((0x100003600L & l) != 0L) + jjCheckNAddStates(49, 51); + break; + case 70: + if (curChar == 10) + jjCheckNAddStates(49, 51); + break; + case 71: + if (curChar == 13) + jjstateSet[jjnewStateCnt++] = 70; + break; + case 72: + case 73: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 60) + kind = 60; + jjCheckNAdd(73); + break; + case 74: + if (curChar == 37) + jjCheckNAddTwoStates(75, 76); + break; + case 75: + if ((0xfffffffffffffbffL & l) != 0L) + jjCheckNAddTwoStates(75, 76); + break; + case 76: + if (curChar == 10 && kind > 62) + kind = 62; + break; + case 77: + if ((0x280000000000L & l) != 0L) + jjCheckNAddStates(8, 12); + break; + case 78: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 12) + kind = 12; + jjCheckNAdd(78); + break; + case 79: + if ((0x3ff000000000000L & l) != 0L) + jjCheckNAddTwoStates(79, 80); + break; + case 80: + if (curChar != 46) + break; + if (kind > 13) + kind = 13; + jjCheckNAdd(81); + break; + case 81: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 13) + kind = 13; + jjCheckNAdd(81); + break; + case 82: + if (curChar == 46) + jjCheckNAdd(83); + break; + case 83: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 13) + kind = 13; + jjCheckNAdd(83); + break; + case 84: + if (curChar == 46) + jjCheckNAdd(85); + break; + case 85: + if ((0x3ff000000000000L & l) != 0L) + jjCheckNAddTwoStates(85, 86); + break; + case 87: + if ((0x280000000000L & l) != 0L) + jjCheckNAdd(88); + break; + case 88: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 14) + kind = 14; + jjCheckNAdd(88); + break; + case 89: + if ((0x3ff000000000000L & l) != 0L) + jjCheckNAddStates(52, 55); + break; + case 90: + if ((0x3ff000000000000L & l) != 0L) + jjCheckNAddTwoStates(90, 91); + break; + case 91: + if (curChar == 46) + jjCheckNAddTwoStates(92, 93); + break; + case 92: + if ((0x3ff000000000000L & l) != 0L) + jjCheckNAddTwoStates(92, 93); + break; + case 94: + if ((0x280000000000L & l) != 0L) + jjCheckNAdd(95); + break; + case 95: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 14) + kind = 14; + jjCheckNAdd(95); + break; + case 96: + if ((0x3ff000000000000L & l) != 0L) + jjCheckNAddTwoStates(96, 97); + break; + case 98: + if ((0x280000000000L & l) != 0L) + jjCheckNAdd(99); + break; + case 99: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 14) + kind = 14; + jjCheckNAdd(99); + break; + case 100: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 12) + kind = 12; + jjCheckNAddStates(0, 7); + break; + case 101: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 23) + kind = 23; + jjCheckNAdd(101); + break; + case 102: + if (curChar == 46) + jjCheckNAddTwoStates(83, 85); + break; + case 104: + if ((0x3ff600000000000L & l) != 0L) + jjAddStates(56, 57); + break; + case 105: + if ((0x3ff200000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 106; + break; + case 106: + if (curChar == 58 && kind > 25) + kind = 25; + break; + case 107: + if ((0x3ff600000000000L & l) != 0L) + jjAddStates(58, 59); + break; + case 108: + if ((0x3ff200000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 109; + break; + case 109: + if (curChar == 58) + jjCheckNAdd(110); + break; + case 110: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 26) + kind = 26; + 
jjCheckNAddTwoStates(111, 112); + break; + case 111: + if ((0x3ff600000000000L & l) != 0L) + jjCheckNAddTwoStates(111, 112); + break; + case 112: + if ((0x3ff200000000000L & l) != 0L && kind > 26) + kind = 26; + break; + case 113: + if (curChar != 58) + break; + if (kind > 25) + kind = 25; + jjCheckNAdd(110); + break; + default : break; + } + } while(i != startsAt); + } + else if (curChar < 128) + { + long l = 1L << (curChar & 077); + do + { + switch(jjstateSet[--i]) + { + case 0: + if ((0x7fffffe87fffffeL & l) != 0L) + { + if (kind > 60) + kind = 60; + jjCheckNAdd(73); + } + else if (curChar == 91) + jjCheckNAddStates(49, 51); + else if (curChar == 64) + jjCheckNAdd(53); + else if (curChar == 92) + jjstateSet[jjnewStateCnt++] = 6; + if ((0x7fffffe07fffffeL & l) != 0L) + jjCheckNAddStates(60, 65); + else if (curChar == 95) + jjstateSet[jjnewStateCnt++] = 44; + break; + case 1: + if (kind > 7) + kind = 7; + jjAddStates(16, 18); + break; + case 5: + if (curChar == 92) + jjstateSet[jjnewStateCnt++] = 6; + break; + case 6: + if ((0x14404410144044L & l) != 0L && kind > 18) + kind = 18; + break; + case 8: + if ((0xffffffffefffffffL & l) != 0L) + jjCheckNAddStates(22, 24); + break; + case 9: + if (curChar == 92) + jjstateSet[jjnewStateCnt++] = 10; + break; + case 10: + if ((0x14404410144044L & l) != 0L) + jjCheckNAddStates(22, 24); + break; + case 13: + if ((0xffffffffefffffffL & l) != 0L) + jjCheckNAddStates(19, 21); + break; + case 14: + if (curChar == 92) + jjstateSet[jjnewStateCnt++] = 15; + break; + case 15: + if ((0x14404410144044L & l) != 0L) + jjCheckNAddStates(19, 21); + break; + case 18: + if ((0xffffffffefffffffL & l) != 0L) + jjCheckNAddStates(25, 28); + break; + case 19: + if (curChar == 92) + jjstateSet[jjnewStateCnt++] = 20; + break; + case 20: + if ((0x14404410144044L & l) != 0L) + jjCheckNAddStates(25, 28); + break; + case 22: + jjCheckNAddStates(25, 28); + break; + case 30: + if ((0xffffffffefffffffL & l) != 0L) + jjCheckNAddStates(31, 34); + break; + case 31: + if (curChar == 92) + jjstateSet[jjnewStateCnt++] = 32; + break; + case 32: + if ((0x14404410144044L & l) != 0L) + jjCheckNAddStates(31, 34); + break; + case 34: + jjCheckNAddStates(31, 34); + break; + case 42: + if ((0xc7fffffeafffffffL & l) != 0L) + jjAddStates(66, 67); + break; + case 45: + if ((0x7fffffe87fffffeL & l) == 0L) + break; + if (kind > 27) + kind = 27; + jjCheckNAddTwoStates(46, 47); + break; + case 46: + if ((0x7fffffe87fffffeL & l) != 0L) + jjCheckNAddTwoStates(46, 47); + break; + case 47: + if ((0x7fffffe87fffffeL & l) != 0L && kind > 27) + kind = 27; + break; + case 48: + if (curChar == 95) + jjstateSet[jjnewStateCnt++] = 44; + break; + case 50: + case 51: + if ((0x7fffffe87fffffeL & l) == 0L) + break; + if (kind > 28) + kind = 28; + jjCheckNAdd(51); + break; + case 52: + if (curChar == 64) + jjCheckNAdd(53); + break; + case 53: + if ((0x7fffffe07fffffeL & l) == 0L) + break; + if (kind > 29) + kind = 29; + jjCheckNAddTwoStates(53, 54); + break; + case 55: + if ((0x7fffffe07fffffeL & l) == 0L) + break; + if (kind > 29) + kind = 29; + jjCheckNAddTwoStates(54, 55); + break; + case 58: + jjAddStates(37, 42); + break; + case 64: + if (curChar == 91) + jjCheckNAddStates(49, 51); + break; + case 66: + jjCheckNAddStates(43, 48); + break; + case 69: + if (curChar == 93 && kind > 39) + kind = 39; + break; + case 72: + case 73: + if ((0x7fffffe87fffffeL & l) == 0L) + break; + if (kind > 60) + kind = 60; + jjCheckNAdd(73); + break; + case 75: + jjAddStates(68, 69); + break; + case 86: + if ((0x2000000020L & l) != 
0L) + jjAddStates(70, 71); + break; + case 93: + if ((0x2000000020L & l) != 0L) + jjAddStates(72, 73); + break; + case 97: + if ((0x2000000020L & l) != 0L) + jjAddStates(74, 75); + break; + case 103: + if ((0x7fffffe07fffffeL & l) != 0L) + jjCheckNAddStates(60, 65); + break; + case 104: + if ((0x7fffffe87fffffeL & l) != 0L) + jjCheckNAddTwoStates(104, 105); + break; + case 105: + if ((0x7fffffe87fffffeL & l) != 0L) + jjCheckNAdd(106); + break; + case 107: + if ((0x7fffffe87fffffeL & l) != 0L) + jjCheckNAddTwoStates(107, 108); + break; + case 108: + if ((0x7fffffe87fffffeL & l) != 0L) + jjCheckNAdd(109); + break; + case 110: + if ((0x7fffffe87fffffeL & l) == 0L) + break; + if (kind > 26) + kind = 26; + jjCheckNAddTwoStates(111, 112); + break; + case 111: + if ((0x7fffffe87fffffeL & l) != 0L) + jjCheckNAddTwoStates(111, 112); + break; + case 112: + if ((0x7fffffe87fffffeL & l) != 0L && kind > 26) + kind = 26; + break; + default : break; + } + } while(i != startsAt); + } + else + { + int hiByte = (int)(curChar >> 8); + int i1 = hiByte >> 6; + long l1 = 1L << (hiByte & 077); + int i2 = (curChar & 0xff) >> 6; + long l2 = 1L << (curChar & 077); + do + { + switch(jjstateSet[--i]) + { + case 0: + if (jjCanMove_1(hiByte, i1, i2, l1, l2)) + { + if (kind > 60) + kind = 60; + jjCheckNAdd(73); + } + if (jjCanMove_1(hiByte, i1, i2, l1, l2)) + jjCheckNAddStates(60, 65); + break; + case 1: + if (!jjCanMove_0(hiByte, i1, i2, l1, l2)) + break; + if (kind > 7) + kind = 7; + jjAddStates(16, 18); + break; + case 8: + if (jjCanMove_0(hiByte, i1, i2, l1, l2)) + jjAddStates(22, 24); + break; + case 13: + if (jjCanMove_0(hiByte, i1, i2, l1, l2)) + jjAddStates(19, 21); + break; + case 18: + case 22: + if (jjCanMove_0(hiByte, i1, i2, l1, l2)) + jjCheckNAddStates(25, 28); + break; + case 30: + case 34: + if (jjCanMove_0(hiByte, i1, i2, l1, l2)) + jjCheckNAddStates(31, 34); + break; + case 42: + if (jjCanMove_0(hiByte, i1, i2, l1, l2)) + jjAddStates(66, 67); + break; + case 45: + if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) + break; + if (kind > 27) + kind = 27; + jjCheckNAddTwoStates(46, 47); + break; + case 46: + if (jjCanMove_2(hiByte, i1, i2, l1, l2)) + jjCheckNAddTwoStates(46, 47); + break; + case 47: + if (jjCanMove_2(hiByte, i1, i2, l1, l2) && kind > 27) + kind = 27; + break; + case 50: + if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) + break; + if (kind > 28) + kind = 28; + jjCheckNAdd(51); + break; + case 51: + if (!jjCanMove_2(hiByte, i1, i2, l1, l2)) + break; + if (kind > 28) + kind = 28; + jjCheckNAdd(51); + break; + case 58: + if (jjCanMove_0(hiByte, i1, i2, l1, l2)) + jjAddStates(37, 42); + break; + case 66: + if (jjCanMove_0(hiByte, i1, i2, l1, l2)) + jjAddStates(43, 48); + break; + case 72: + if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) + break; + if (kind > 60) + kind = 60; + jjCheckNAdd(73); + break; + case 73: + if (!jjCanMove_2(hiByte, i1, i2, l1, l2)) + break; + if (kind > 60) + kind = 60; + jjCheckNAdd(73); + break; + case 75: + if (jjCanMove_0(hiByte, i1, i2, l1, l2)) + jjAddStates(68, 69); + break; + case 103: + if (jjCanMove_1(hiByte, i1, i2, l1, l2)) + jjCheckNAddStates(60, 65); + break; + case 104: + if (jjCanMove_2(hiByte, i1, i2, l1, l2)) + jjCheckNAddTwoStates(104, 105); + break; + case 105: + if (jjCanMove_2(hiByte, i1, i2, l1, l2)) + jjCheckNAdd(106); + break; + case 107: + if (jjCanMove_2(hiByte, i1, i2, l1, l2)) + jjCheckNAddTwoStates(107, 108); + break; + case 108: + if (jjCanMove_2(hiByte, i1, i2, l1, l2)) + jjCheckNAdd(109); + break; + case 110: + if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) + 
break; + if (kind > 26) + kind = 26; + jjCheckNAddTwoStates(111, 112); + break; + case 111: + if (jjCanMove_2(hiByte, i1, i2, l1, l2)) + jjCheckNAddTwoStates(111, 112); + break; + case 112: + if (jjCanMove_2(hiByte, i1, i2, l1, l2) && kind > 26) + kind = 26; + break; + default : break; + } + } while(i != startsAt); + } + if (kind != 0x7fffffff) + { + jjmatchedKind = kind; + jjmatchedPos = curPos; + kind = 0x7fffffff; + } + ++curPos; + if ((i = jjnewStateCnt) == (startsAt = 114 - (jjnewStateCnt = startsAt))) + break; + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { break; } + } + if (jjmatchedPos > strPos) + return curPos; + + int toRet = Math.max(curPos, seenUpto); + + if (curPos < toRet) + for (i = toRet - Math.min(curPos, seenUpto); i-- > 0; ) + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { throw new Error("Internal Error : Please send a bug report."); } + + if (jjmatchedPos < strPos) + { + jjmatchedKind = strKind; + jjmatchedPos = strPos; + } + else if (jjmatchedPos == strPos && jjmatchedKind > strKind) + jjmatchedKind = strKind; + + return toRet; +} +static final int[] jjnextStates = { + 78, 79, 80, 90, 91, 96, 97, 101, 78, 79, 82, 84, 89, 57, 60, 61, + 1, 2, 4, 13, 14, 16, 8, 9, 11, 18, 19, 21, 23, 24, 26, 30, + 31, 33, 35, 36, 38, 57, 58, 59, 63, 60, 61, 65, 66, 67, 71, 68, + 69, 65, 68, 69, 90, 91, 96, 97, 104, 105, 107, 108, 104, 105, 106, 107, + 108, 109, 42, 43, 75, 76, 87, 88, 94, 95, 98, 99, +}; +private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2) +{ + switch(hiByte) + { + case 0: + return ((jjbitVec2[i2] & l2) != 0L); + default : + if ((jjbitVec0[i1] & l1) != 0L) + return true; + return false; + } +} +private static final boolean jjCanMove_1(int hiByte, int i1, int i2, long l1, long l2) +{ + switch(hiByte) + { + case 0: + return ((jjbitVec4[i2] & l2) != 0L); + case 3: + return ((jjbitVec5[i2] & l2) != 0L); + case 32: + return ((jjbitVec6[i2] & l2) != 0L); + case 33: + return ((jjbitVec7[i2] & l2) != 0L); + case 47: + return ((jjbitVec8[i2] & l2) != 0L); + case 48: + return ((jjbitVec0[i2] & l2) != 0L); + case 255: + return ((jjbitVec9[i2] & l2) != 0L); + default : + if ((jjbitVec3[i1] & l1) != 0L) + return true; + return false; + } +} +private static final boolean jjCanMove_2(int hiByte, int i1, int i2, long l1, long l2) +{ + switch(hiByte) + { + case 0: + return ((jjbitVec10[i2] & l2) != 0L); + case 3: + return ((jjbitVec11[i2] & l2) != 0L); + case 32: + return ((jjbitVec12[i2] & l2) != 0L); + case 33: + return ((jjbitVec7[i2] & l2) != 0L); + case 47: + return ((jjbitVec8[i2] & l2) != 0L); + case 48: + return ((jjbitVec0[i2] & l2) != 0L); + case 255: + return ((jjbitVec9[i2] & l2) != 0L); + default : + if ((jjbitVec3[i1] & l1) != 0L) + return true; + return false; + } +} + +/** Token literal values. */ +public static final String[] jjstrLiteralImages = { +"", null, null, null, null, null, null, null, "\100\160\162\145\146\151\170", +"\100\142\141\163\145", null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, "\50", "\51", null, "\173", +"\175", "\133", "\135", null, "\73", "\54", "\56", "\75", "\72\55", "\44", "\77", +"\176", "\72", "\52", "\57", "\134", "\ufeff", "\136\136", "\100", null, null, null, +null, null, null, null, null, }; + +/** Lexer state names. 
*/ +public static final String[] lexStateNames = { + "DEFAULT", +}; +static final long[] jjtoToken = { + 0x107fffff3ffc7f01L, +}; +static final long[] jjtoSkip = { + 0x40000000000000beL, +}; +static final long[] jjtoSpecial = { + 0x80L, +}; +protected SimpleCharStream input_stream; +private final int[] jjrounds = new int[114]; +private final int[] jjstateSet = new int[228]; +protected char curChar; +/** Constructor. */ +public JavaCCRuleParserTokenManager(SimpleCharStream stream){ + if (SimpleCharStream.staticFlag) + throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer."); + input_stream = stream; +} + +/** Constructor. */ +public JavaCCRuleParserTokenManager(SimpleCharStream stream, int lexState){ + this(stream); + SwitchTo(lexState); +} + +/** Reinitialise parser. */ +public void ReInit(SimpleCharStream stream) +{ + jjmatchedPos = jjnewStateCnt = 0; + curLexState = defaultLexState; + input_stream = stream; + ReInitRounds(); +} +private void ReInitRounds() +{ + int i; + jjround = 0x80000001; + for (i = 114; i-- > 0;) + jjrounds[i] = 0x80000000; +} + +/** Reinitialise parser. */ +public void ReInit(SimpleCharStream stream, int lexState) +{ + ReInit(stream); + SwitchTo(lexState); +} + +/** Switch to specified lex state. */ +public void SwitchTo(int lexState) +{ + if (lexState >= 1 || lexState < 0) + throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE); + else + curLexState = lexState; +} + +protected Token jjFillToken() +{ + final Token t; + final String curTokenImage; + final int beginLine; + final int endLine; + final int beginColumn; + final int endColumn; + String im = jjstrLiteralImages[jjmatchedKind]; + curTokenImage = (im == null) ? input_stream.GetImage() : im; + beginLine = input_stream.getBeginLine(); + beginColumn = input_stream.getBeginColumn(); + endLine = input_stream.getEndLine(); + endColumn = input_stream.getEndColumn(); + t = Token.newToken(jjmatchedKind, curTokenImage); + + t.beginLine = beginLine; + t.endLine = endLine; + t.beginColumn = beginColumn; + t.endColumn = endColumn; + + return t; +} + +int curLexState = 0; +int defaultLexState = 0; +int jjnewStateCnt; +int jjround; +int jjmatchedPos; +int jjmatchedKind; + +/** Get the next Token. 
*/ +public Token getNextToken() +{ + Token specialToken = null; + Token matchedToken; + int curPos = 0; + + EOFLoop : + for (;;) + { + try + { + curChar = input_stream.BeginToken(); + } + catch(java.io.IOException e) + { + jjmatchedKind = 0; + matchedToken = jjFillToken(); + matchedToken.specialToken = specialToken; + return matchedToken; + } + + jjmatchedKind = 0x7fffffff; + jjmatchedPos = 0; + curPos = jjMoveStringLiteralDfa0_0(); + if (jjmatchedKind != 0x7fffffff) + { + if (jjmatchedPos + 1 < curPos) + input_stream.backup(curPos - jjmatchedPos - 1); + if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) + { + matchedToken = jjFillToken(); + matchedToken.specialToken = specialToken; + return matchedToken; + } + else + { + if ((jjtoSpecial[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) + { + matchedToken = jjFillToken(); + if (specialToken == null) + specialToken = matchedToken; + else + { + matchedToken.specialToken = specialToken; + specialToken = (specialToken.next = matchedToken); + } + } + continue EOFLoop; + } + } + int error_line = input_stream.getEndLine(); + int error_column = input_stream.getEndColumn(); + String error_after = null; + boolean EOFSeen = false; + try { input_stream.readChar(); input_stream.backup(1); } + catch (java.io.IOException e1) { + EOFSeen = true; + error_after = curPos <= 1 ? "" : input_stream.GetImage(); + if (curChar == '\n' || curChar == '\r') { + error_line++; + error_column = 0; + } + else + error_column++; + } + if (!EOFSeen) { + input_stream.backup(1); + error_after = curPos <= 1 ? "" : input_stream.GetImage(); + } + throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR); + } +} + +private void jjCheckNAdd(int state) +{ + if (jjrounds[state] != jjround) + { + jjstateSet[jjnewStateCnt++] = state; + jjrounds[state] = jjround; + } +} +private void jjAddStates(int start, int end) +{ + do { + jjstateSet[jjnewStateCnt++] = jjnextStates[start]; + } while (start++ != end); +} +private void jjCheckNAddTwoStates(int state1, int state2) +{ + jjCheckNAdd(state1); + jjCheckNAdd(state2); +} + +private void jjCheckNAddStates(int start, int end) +{ + do { + jjCheckNAdd(jjnextStates[start]); + } while (start++ != end); +} + +} From 21f716d3228b5f79e3e90e8e137399784b95406f Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 17 Jul 2019 15:14:52 +0200 Subject: [PATCH 0015/1003] include vlog4j-parser in parent --- pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/pom.xml b/pom.xml index f15a780ec..efb3e9ad9 100644 --- a/pom.xml +++ b/pom.xml @@ -20,6 +20,7 @@ vlog4j-examples vlog4j-owlapi vlog4j-graal + vlog4j-parser From 7226483d774856c967196230ef8b8969c492feaa Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 17 Jul 2019 15:15:49 +0200 Subject: [PATCH 0016/1003] add doid example using our own syntax --- vlog4j-examples/pom.xml | 5 + .../data/input/localSyntax/doid-example.txt | 26 +++++ .../examples/DoidExampleLocalSyntax.java | 110 ++++++++++++++++++ 3 files changed, 141 insertions(+) create mode 100644 vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index fac638a4a..a36d00215 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -37,6 +37,11 @@ vlog4j-graal ${project.version} + + ${project.groupId} + vlog4j-parser + ${project.version} + 
org.slf4j slf4j-log4j12 diff --git a/vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt b/vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt new file mode 100644 index 000000000..2c46b3418 --- /dev/null +++ b/vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt @@ -0,0 +1,26 @@ +@base . +@prefix xsd: . + +%this is a comment + +deathCause(?X, ?Z) :- recentDeathsCause(?X, ?Z) . +deathCause(?X, ?Z) :- recentDeaths(?X) . + +doid(?Iri,?DoidId) :- doidTriple(?Iri,,?DoidId) . +hasDoid(?X) :- diseaseId(?X,?DoidId) . + +diseaseHierarchy(?X,?Y) :- doidTriple(?X,,?Y) . +diseaseHierarchy(?X,?Z) :- diseaseHierarchy(?X,?Y), doidTriple(?Y,,?Z) . + +%the next three rules are the same +%cancerDisease(?Xdoid) :- diseaseHierarchy(?X,?Y), doid(?Y,"DOID:162"^^), doid(?X, ?Xdoid) . +%cancerDisease(?Xdoid) :- diseaseHierarchy(?X,?Y), doid(?Y,"DOID:162"^^xsd:string), doid(?X, ?Xdoid) . +cancerDisease(?Xdoid) :- diseaseHierarchy(?X,?Y), doid(?Y,"DOID:162"), doid(?X, ?Xdoid) . + +humansWhoDiedOfCancer(?X) :- deathCause(?X,?Y), diseaseId(?Y,?Z), cancerDisease(?Z) . +humansWhoDiedOfNoncancer(?X) :- deathCause(?X,?Y), diseaseId(?Y,?Z), ~cancerDisease(?Z) . +humansWhoDiedOfNoncancer(?X) :- deathCause(?X,?y), ~hasDoid(?y) . + +humansWhoDiedOfCancer(?X) . +humansWhoDiedOfNoncancer(?X) . + diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java new file mode 100644 index 000000000..ee03192a1 --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java @@ -0,0 +1,110 @@ +package org.semanticweb.vlog4j.examples; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.net.URL; + +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; + +import org.semanticweb.vlog4j.parser.api.RuleParser; +import org.semanticweb.vlog4j.parser.implementation.PrologueException; +import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; + +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; + +/** + * This example reasons about human diseases, based on information from the + * Disease Ontology (DOID) and Wikidata. It illustrates how to load data from + * different sources (RDF file, SPARQL), and reason about these inputs using + * rules that are loaded from a file. The rules used here employ existential + * quantifiers and stratified negation. 
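+ *
+ * <p>
+ * A rough usage sketch (assuming the RuleParser API introduced by the
+ * vlog4j-parser module in this patch series; the rule string below is a
+ * made-up toy example and not part of the DOID rule file): rules in the
+ * syntax shown above can also be parsed directly from a Java string. Ground
+ * atoms in the parsed input are collected as facts, while atoms containing
+ * variables are collected as queries.
+ * </p>
+ *
+ * <pre>
+ * RuleParser parser = new RuleParser("ancestor(?X, ?Z) :- parent(?X, ?Y), ancestor(?Y, ?Z) .");
+ * parser.parse(); // may throw ParseException or PrologueException on malformed input
+ * reasoner.addRules(parser.getRules()); // reasoner as created in main() below
+ * // facts and queries, if any, are available via parser.getFacts() and parser.getQueries()
+ * </pre>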
+ * + * @author Markus Kroetzsch + * @author Larry Gonzalez + */ +public class DoidExampleLocalSyntax { + + public static void main(final String[] args) throws ReasonerStateException, IOException, EdbIdbSeparationException, + IncompatiblePredicateArityException, ParseException, PrologueException { + + ExamplesUtils.configureLogging(); + + final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); + + try (final Reasoner reasoner = Reasoner.getInstance()) { + reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER+"vlog.log"); + reasoner.setLogLevel(LogLevel.DEBUG); + + /* Configure RDF data source */ + final Predicate doidTriplePredicate = makePredicate("doidTriple", 3); + final DataSource doidDataSource = new RdfFileDataSource( + new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); + reasoner.addFactsFromDataSource(doidTriplePredicate, doidDataSource); + + /* Configure SPARQL data sources */ + final String sparqlHumansWithDisease = "?disease wdt:P699 ?doid ."; + // (wdt:P669 = "Disease Ontology ID") + final DataSource diseasesDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, + "disease,doid", sparqlHumansWithDisease); + final Predicate diseaseIdPredicate = Expressions.makePredicate("diseaseId", 2); + reasoner.addFactsFromDataSource(diseaseIdPredicate, diseasesDataSource); + + final String sparqlRecentDeaths = "?human wdt:P31 wd:Q5; wdt:P570 ?deathDate . FILTER (YEAR(?deathDate) = 2018)"; + // (wdt:P31 = "instance of"; wd:Q5 = "human", wdt:570 = "date of death") + final DataSource recentDeathsDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "human", + sparqlRecentDeaths); + final Predicate recentDeathsPredicate = Expressions.makePredicate("recentDeaths", 1); + reasoner.addFactsFromDataSource(recentDeathsPredicate, recentDeathsDataSource); + + final String sparqlRecentDeathsCause = sparqlRecentDeaths + "?human wdt:P509 ?causeOfDeath . "; + // (wdt:P509 = "cause of death") + final DataSource recentDeathsCauseDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, + "human,causeOfDeath", sparqlRecentDeathsCause); + final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); + reasoner.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); + + RuleParser rp = new RuleParser( + new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/localSyntax/doid-example.txt")); + for (Rule rule : rp.getRules()) + System.out.println(rule); + rp.parse(); + + reasoner.addFacts(rp.getFacts()); + reasoner.addRules(rp.getRules()); + + System.out.println("Rules configured:\n--"); + reasoner.getRules().forEach(System.out::println); + System.out.println("--"); + reasoner.load(); + System.out.println("Loading completed."); + System.out.println("Starting reasoning (including SPARQL query answering) ..."); + reasoner.reason(); + System.out.println("... reasoning completed.\n--"); + + System.out.println("Number of results in queries:"); + QueryResultIterator answers; + for (PositiveLiteral l : rp.getQueries()) { + answers = reasoner.answerQuery(l, true); + System.out.print(l.toString()); + System.out.println(": " + ExamplesUtils.iteratorSize(answers)); + } + System.out.println("Done."); + + } + + } + +} \ No newline at end of file From 0384186a9f0721e84c43b4336617a6a754bb932a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 17 Jul 2019 15:56:23 +0200 Subject: [PATCH 0017/1003] fix dependency parser -> core. 
add licenses --- .../examples/DoidExampleLocalSyntax.java | 25 ++- vlog4j-parser/LICENSE.txt | 201 ++++++++++++++++++ vlog4j-parser/pom.xml | 56 ++--- .../vlog4j/parser/api/Prologue.java | 20 ++ .../vlog4j/parser/api/RuleParser.java | 22 +- .../parser/implementation/LocalPrologue.java | 22 +- .../implementation/PrologueException.java | 20 ++ .../parser/implementation/RuleParserBase.java | 22 +- .../javacc/JavaCCRuleParser.java | 22 +- .../javacc/JavaCCRuleParserTokenManager.java | 20 ++ 10 files changed, 398 insertions(+), 32 deletions(-) create mode 100644 vlog4j-parser/LICENSE.txt diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java index ee03192a1..957b9b081 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.examples; +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -78,11 +98,8 @@ public static void main(final String[] args) throws ReasonerStateException, IOEx RuleParser rp = new RuleParser( new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/localSyntax/doid-example.txt")); - for (Rule rule : rp.getRules()) - System.out.println(rule); rp.parse(); - reasoner.addFacts(rp.getFacts()); reasoner.addRules(rp.getRules()); System.out.println("Rules configured:\n--"); @@ -107,4 +124,4 @@ public static void main(final String[] args) throws ReasonerStateException, IOEx } -} \ No newline at end of file +} diff --git a/vlog4j-parser/LICENSE.txt b/vlog4j-parser/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/vlog4j-parser/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index dd1568c8b..0130d903f 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -1,26 +1,34 @@ - - 4.0.0 - - org.semanticweb.vlog4j - vlog4j-parent - 0.4.0-SNAPSHOT - - org.semanticweb.vlog4j - vlog4j-parser - 0.4.0-SNAPSHOT - vlog4j-parser - http://maven.apache.org - - UTF-8 - - - - junit - junit - 3.8.1 - test - - + + 4.0.0 + + + org.semanticweb.vlog4j + vlog4j-parent + 0.4.0-SNAPSHOT + + + vlog4j-parser + + vlog4j-parser + http://maven.apache.org + + UTF-8 + + + + ${project.groupId} + vlog4j-core + ${project.version} + + + junit + junit + 3.8.1 + test + + diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java index d99bd8c38..4eaf715f0 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.parser.api; +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import org.semanticweb.vlog4j.parser.implementation.PrologueException; public interface Prologue { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java index 0e91a7814..b08ab2e80 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.parser.api; +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.ByteArrayInputStream; import java.io.InputStream; @@ -20,4 +40,4 @@ public RuleParser(InputStream stream, String encoding) { public RuleParser(String rules) { super(new ByteArrayInputStream(rules.getBytes()), "UTF-8"); } -} \ No newline at end of file +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java index 600c9235d..df6975556 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.parser.implementation; +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.net.URI; import java.util.HashMap; import java.util.Map; @@ -90,4 +110,4 @@ public String resolvePName(String prefixedName) throws PrologueException { throw new PrologueException("@prefix not found: " + prefixedName); } -} \ No newline at end of file +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java index c69b5b157..3ee43243d 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.parser.implementation; +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + public class PrologueException extends Exception { /** * diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java index e9db151f9..9c9df09de 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.parser.implementation; +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.util.List; import java.util.ArrayList; @@ -346,4 +366,4 @@ public List getQueries() { } -} \ No newline at end of file +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java index 662a25638..6d5793280 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java @@ -1,6 +1,26 @@ /* Generated By:JavaCC: Do not edit this line. JavaCCRuleParser.java */ package org.semanticweb.vlog4j.parser.implementation.javacc; +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.util.List; import java.util.ArrayList; @@ -314,7 +334,7 @@ final public Constant NumericLiteral() throws ParseException { final public String RDFLiteral() throws ParseException, PrologueException { Token t; String lex = null; - String lang = null; // Optional lang tag and datatype. + String lang = null; // Optional lang tag and datatype. 
String dt = null; lex = String(); switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java index 215dac1f9..6bea8ccdb 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java @@ -1,5 +1,25 @@ /* Generated By:JavaCC: Do not edit this line. JavaCCRuleParserTokenManager.java */ package org.semanticweb.vlog4j.parser.implementation.javacc; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ import java.util.List; import java.util.ArrayList; import org.semanticweb.vlog4j.parser.implementation.RuleParserBase; From 985eeb0b1e4a701144587f38bb9e4c5d96dab957 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 17 Jul 2019 17:56:32 +0200 Subject: [PATCH 0018/1003] fix pom file to build javacc parser --- vlog4j-parser/pom.xml | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index 0130d903f..644efe5df 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -30,5 +30,28 @@ 3.8.1 test - + + + + + + + org.codehaus.mojo + javacc-maven-plugin + 2.6 + + + ruleparser + + ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc + + + javacc + + + + + + +
    From c8bc21f730ccee20985c1350198ab8f1c29f3752 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 17 Jul 2019 18:15:14 +0200 Subject: [PATCH 0019/1003] modify travis settings. Just for testing --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 68930cbba..07ab6a468 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,7 @@ before_install: - sudo apt-get install gcc-5 -y # - eval “CC=gcc-5 && CXX=g++-5” ## Uncomment line below to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar - - sh ./build-vlog-library.sh + #- sh ./build-vlog-library.sh after_success: - mvn clean cobertura:cobertura coveralls:cobertura From e1d64edec7b6e9e7daa9e995c1749375ad4ab727 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 17 Jul 2019 19:55:56 +0200 Subject: [PATCH 0020/1003] fix pom lifecycle and organize imports --- .../examples/DoidExampleLocalSyntax.java | 4 +- vlog4j-parser/pom.xml | 56 +- .../parser/implementation/javacc/.gitignore | 2 + .../javacc/JavaCCRuleParser.java | 1009 ----------- .../javacc/JavaCCRuleParserTokenManager.java | 1520 ----------------- 5 files changed, 46 insertions(+), 2545 deletions(-) delete mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java delete mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java index 957b9b081..03d330284 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java @@ -19,6 +19,7 @@ * limitations under the License. 
* #L% */ +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; import java.io.File; import java.io.FileInputStream; @@ -27,7 +28,6 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.LogLevel; @@ -43,7 +43,7 @@ import org.semanticweb.vlog4j.parser.implementation.PrologueException; import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; + /** * This example reasons about human diseases, based on information from the diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index 644efe5df..3843c2954 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -33,23 +33,51 @@ + + + org.codehaus.mojo + javacc-maven-plugin + 2.6 + + + ruleparser + + ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/ + + + javacc + + + + + + - org.codehaus.mojo - javacc-maven-plugin - 2.6 - - - ruleparser - - ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc - - - javacc - - - + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + org.codehaus.mojo + javacc-maven-plugin + [2.6,) + + javacc + + + + + + + + + diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore index 61eb9ad4c..3243e7e57 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore @@ -3,3 +3,5 @@ /SimpleCharStream.java /Token.java /TokenMgrError.java +/JavaCCRuleParser.java +/JavaCCRuleParserTokenManager.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java deleted file mode 100644 index 6d5793280..000000000 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.java +++ /dev/null @@ -1,1009 +0,0 @@ -/* Generated By:JavaCC: Do not edit this line. JavaCCRuleParser.java */ -package org.semanticweb.vlog4j.parser.implementation.javacc; - -/*- - * #%L - * vlog4j-parser - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.util.List; -import java.util.ArrayList; - -import org.semanticweb.vlog4j.parser.implementation.RuleParserBase; -import org.semanticweb.vlog4j.parser.implementation.PrologueException; - -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Constant; - -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeNegativeLiteral; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveConjunction; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConjunction; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; - - -public class JavaCCRuleParser extends RuleParserBase implements JavaCCRuleParserConstants { - - final public void parse() throws ParseException, PrologueException { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BASE: - base(); - break; - default: - jj_la1[0] = jj_gen; - ; - } - label_1: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case PREFIX: - ; - break; - default: - jj_la1[1] = jj_gen; - break label_1; - } - prefix(); - } - label_2: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case IRI: - case PNAME_NS: - case PNAME_LN: - case VARORPREDNAME: - ; - break; - default: - jj_la1[2] = jj_gen; - break label_2; - } - statement(); - } - jj_consume_token(0); - } - - final public void base() throws ParseException, PrologueException { - String iriString; - jj_consume_token(BASE); - iriString = IRIREF(); - jj_consume_token(DOT); - localPrologue.setBase(iriString); - } - - final public void prefix() throws ParseException, PrologueException { - Token t; - String iriString; - jj_consume_token(PREFIX); - t = jj_consume_token(PNAME_NS); - iriString = IRIREF(); - jj_consume_token(DOT); - //note that prefix includes the colon (:) - localPrologue.setPrefix(t.image, iriString); - } - - final public void statement() throws ParseException, PrologueException { - Rule r; - PositiveLiteral l; - if (jj_2_1(2147483647)) { - r = rule(); - listOfRules.add(r); - } else { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case IRI: - case PNAME_NS: - case PNAME_LN: - case VARORPREDNAME: - l = positiveLiteral(); - jj_consume_token(DOT); - if (l.getVariables().isEmpty()) - listOfFacts.add(l); - else - listOfQueries.add(l); - break; - default: - jj_la1[3] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } - } - - final public Rule rule() throws ParseException, PrologueException { - List < PositiveLiteral > head; - List < Literal > body; - head = listOfPositiveLiterals(); - jj_consume_token(ARROW); - body = listOfLiterals(); - jj_consume_token(DOT); - {if (true) return makeRule(makePositiveConjunction(head), makeConjunction(body));} - throw new Error("Missing return statement in function"); - } - - final public List < PositiveLiteral > listOfPositiveLiterals() throws ParseException, PrologueException { - PositiveLiteral l; - List < PositiveLiteral > list = new ArrayList < 
PositiveLiteral > (); - l = positiveLiteral(); - list.add(l); - label_3: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: - ; - break; - default: - jj_la1[4] = jj_gen; - break label_3; - } - jj_consume_token(COMMA); - l = positiveLiteral(); - list.add(l); - } - {if (true) return list;} - throw new Error("Missing return statement in function"); - } - - final public List < Literal > listOfLiterals() throws ParseException, PrologueException { - Literal l; - List < Literal > list = new ArrayList < Literal > (); - l = literal(); - list.add(l); - label_4: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: - ; - break; - default: - jj_la1[5] = jj_gen; - break label_4; - } - jj_consume_token(COMMA); - l = literal(); - list.add(l); - } - {if (true) return list;} - throw new Error("Missing return statement in function"); - } - - final public Literal literal() throws ParseException, PrologueException { - Literal l = null; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case IRI: - case PNAME_NS: - case PNAME_LN: - case VARORPREDNAME: - l = positiveLiteral(); - {if (true) return l;} - break; - case TILDE: - l = negativeLiteral(); - {if (true) return l;} - break; - default: - jj_la1[6] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - throw new Error("Missing return statement in function"); - } - - final public PositiveLiteral positiveLiteral() throws ParseException, PrologueException { - Token t; - List < Term > terms; - String predicateName; - predicateName = predicateName(); - jj_consume_token(LPAREN); - terms = listOfTerms(); - jj_consume_token(RPAREN); - {if (true) return makePositiveLiteral(predicateName, terms);} - throw new Error("Missing return statement in function"); - } - - final public NegativeLiteral negativeLiteral() throws ParseException, PrologueException { - List < Term > terms; - String predicateName; - jj_consume_token(TILDE); - predicateName = predicateName(); - jj_consume_token(LPAREN); - terms = listOfTerms(); - jj_consume_token(RPAREN); - {if (true) return makeNegativeLiteral(predicateName, terms);} - throw new Error("Missing return statement in function"); - } - - final public List < Term > listOfTerms() throws ParseException, PrologueException { - Term t; - List < Term > list = new ArrayList < Term > (); - t = term(); - list.add(t); - label_5: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: - ; - break; - default: - jj_la1[7] = jj_gen; - break label_5; - } - jj_consume_token(COMMA); - t = term(); - list.add(t); - } - {if (true) return list;} - throw new Error("Missing return statement in function"); - } - - final public String predicateName() throws ParseException, PrologueException { - String s; - Token t; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case IRI: - case PNAME_NS: - case PNAME_LN: - s = IRI(); - {if (true) return s;} - break; - case VARORPREDNAME: - t = jj_consume_token(VARORPREDNAME); - {if (true) return t.image;} - break; - default: - jj_la1[8] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - throw new Error("Missing return statement in function"); - } - - final public Term term() throws ParseException, PrologueException { - String s; - Token t; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case IRI: - case PNAME_NS: - case PNAME_LN: - s = IRI(); - {if (true) return makeConstant(s);} - break; - case STRING_LITERAL1: - case STRING_LITERAL2: - case STRING_LITERAL_LONG1: - case STRING_LITERAL_LONG2: - s = RDFLiteral(); - {if (true) return makeConstant(s);} - 
break; - case VAR: - t = jj_consume_token(VAR); - {if (true) return makeVariable(t.image.substring(1));} - break; - default: - jj_la1[9] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - throw new Error("Missing return statement in function"); - } - -/** [16] */ - final public Constant NumericLiteral() throws ParseException { - Token t; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case INTEGER: - t = jj_consume_token(INTEGER); - {if (true) return createLiteralInteger(t.image);} - break; - case DECIMAL: - t = jj_consume_token(DECIMAL); - {if (true) return createLiteralDecimal(t.image);} - break; - case DOUBLE: - t = jj_consume_token(DOUBLE); - {if (true) return createLiteralDouble(t.image);} - break; - default: - jj_la1[10] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - throw new Error("Missing return statement in function"); - } - - final public String RDFLiteral() throws ParseException, PrologueException { - Token t; - String lex = null; - String lang = null; // Optional lang tag and datatype. - String dt = null; - lex = String(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LANGTAG: - case DATATYPE: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LANGTAG: - lang = Langtag(); - break; - case DATATYPE: - jj_consume_token(DATATYPE); - dt = IRI(); - break; - default: - jj_la1[11] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - break; - default: - jj_la1[12] = jj_gen; - ; - } - {if (true) return strRDFLiteral(lex, lang, dt);} - throw new Error("Missing return statement in function"); - } - - final public String Langtag() throws ParseException { - Token t; - t = jj_consume_token(LANGTAG); - String lang = stripChars(t.image, 1); - {if (true) return lang;} - throw new Error("Missing return statement in function"); - } - - final public String BooleanLiteral() throws ParseException { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case TRUE: - jj_consume_token(TRUE); - {if (true) return "true^^http://www.w3.org/2001/XMLSchema#boolean";} - break; - case FALSE: - jj_consume_token(FALSE); - {if (true) return "false^^http://www.w3.org/2001/XMLSchema#boolean";} - break; - default: - jj_la1[13] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - throw new Error("Missing return statement in function"); - } - - final public String String() throws ParseException { - Token t; - String lex; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case STRING_LITERAL1: - t = jj_consume_token(STRING_LITERAL1); - lex = stripQuotes(t.image); - break; - case STRING_LITERAL2: - t = jj_consume_token(STRING_LITERAL2); - lex = stripQuotes(t.image); - break; - case STRING_LITERAL_LONG1: - t = jj_consume_token(STRING_LITERAL_LONG1); - lex = stripQuotes3(t.image); - break; - case STRING_LITERAL_LONG2: - t = jj_consume_token(STRING_LITERAL_LONG2); - lex = stripQuotes3(t.image); - break; - default: - jj_la1[14] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - lex = unescapeStr(lex, t.beginLine, t.beginColumn); - {if (true) return lex;} - throw new Error("Missing return statement in function"); - } - - final public String IRI() throws ParseException, PrologueException { - String iri; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case IRI: - iri = IRIREF(); - break; - case PNAME_NS: - case PNAME_LN: - iri = PrefixedName(); - break; - default: - jj_la1[15] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - {if (true) return "<"+iri+">";} - throw new Error("Missing return statement in function"); - } - - final public 
String PrefixedName() throws ParseException, PrologueException { - Token t; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case PNAME_LN: - t = jj_consume_token(PNAME_LN); - break; - case PNAME_NS: - t = jj_consume_token(PNAME_NS); - break; - default: - jj_la1[16] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - {if (true) return localPrologue.resolvePName(t.image);} - throw new Error("Missing return statement in function"); - } - - final public String IRIREF() throws ParseException { - Token t; - t = jj_consume_token(IRI); - // we remove '<' and '>' - {if (true) return t.image.substring(1,t.image.length()-1);} - throw new Error("Missing return statement in function"); - } - - private boolean jj_2_1(int xla) { - jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_1(); } - catch(LookaheadSuccess ls) { return true; } - finally { jj_save(0, xla); } - } - - private boolean jj_3R_16() { - if (jj_3R_21()) return true; - return false; - } - - private boolean jj_3R_11() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_15()) { - jj_scanpos = xsp; - if (jj_3R_16()) return true; - } - return false; - } - - private boolean jj_3R_15() { - if (jj_3R_9()) return true; - return false; - } - - private boolean jj_3R_37() { - if (jj_3R_39()) return true; - return false; - } - - private boolean jj_3R_32() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_37()) { - jj_scanpos = xsp; - if (jj_3R_38()) return true; - } - return false; - } - - private boolean jj_3R_30() { - Token xsp; - xsp = jj_scanpos; - if (jj_scan_token(26)) { - jj_scanpos = xsp; - if (jj_scan_token(25)) return true; - } - return false; - } - - private boolean jj_3R_18() { - if (jj_scan_token(VARORPREDNAME)) return true; - return false; - } - - private boolean jj_3R_13() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_17()) { - jj_scanpos = xsp; - if (jj_3R_18()) return true; - } - return false; - } - - private boolean jj_3R_17() { - if (jj_3R_22()) return true; - return false; - } - - private boolean jj_3R_12() { - if (jj_scan_token(COMMA)) return true; - if (jj_3R_11()) return true; - return false; - } - - private boolean jj_3R_39() { - if (jj_scan_token(LANGTAG)) return true; - return false; - } - - private boolean jj_3R_27() { - if (jj_3R_30()) return true; - return false; - } - - private boolean jj_3R_26() { - if (jj_3R_29()) return true; - return false; - } - - private boolean jj_3R_8() { - if (jj_3R_11()) return true; - Token xsp; - while (true) { - xsp = jj_scanpos; - if (jj_3R_12()) { jj_scanpos = xsp; break; } - } - return false; - } - - private boolean jj_3R_20() { - if (jj_scan_token(COMMA)) return true; - if (jj_3R_19()) return true; - return false; - } - - private boolean jj_3R_22() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_26()) { - jj_scanpos = xsp; - if (jj_3R_27()) return true; - } - return false; - } - - private boolean jj_3R_14() { - if (jj_3R_19()) return true; - Token xsp; - while (true) { - xsp = jj_scanpos; - if (jj_3R_20()) { jj_scanpos = xsp; break; } - } - return false; - } - - private boolean jj_3R_28() { - if (jj_3R_31()) return true; - Token xsp; - xsp = jj_scanpos; - if (jj_3R_32()) jj_scanpos = xsp; - return false; - } - - private boolean jj_3R_10() { - if (jj_scan_token(COMMA)) return true; - if (jj_3R_9()) return true; - return false; - } - - private boolean jj_3R_7() { - if (jj_3R_9()) return true; - Token xsp; - while (true) { - xsp = jj_scanpos; - if (jj_3R_10()) { jj_scanpos = xsp; break; } - } - return false; - } - - private boolean jj_3R_36() { - if 
(jj_scan_token(STRING_LITERAL_LONG2)) return true; - return false; - } - - private boolean jj_3R_35() { - if (jj_scan_token(STRING_LITERAL_LONG1)) return true; - return false; - } - - private boolean jj_3R_34() { - if (jj_scan_token(STRING_LITERAL2)) return true; - return false; - } - - private boolean jj_3R_33() { - if (jj_scan_token(STRING_LITERAL1)) return true; - return false; - } - - private boolean jj_3R_21() { - if (jj_scan_token(TILDE)) return true; - if (jj_3R_13()) return true; - if (jj_scan_token(LPAREN)) return true; - if (jj_3R_14()) return true; - if (jj_scan_token(RPAREN)) return true; - return false; - } - - private boolean jj_3R_38() { - if (jj_scan_token(DATATYPE)) return true; - if (jj_3R_22()) return true; - return false; - } - - private boolean jj_3R_6() { - if (jj_3R_7()) return true; - if (jj_scan_token(ARROW)) return true; - if (jj_3R_8()) return true; - if (jj_scan_token(DOT)) return true; - return false; - } - - private boolean jj_3R_31() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_33()) { - jj_scanpos = xsp; - if (jj_3R_34()) { - jj_scanpos = xsp; - if (jj_3R_35()) { - jj_scanpos = xsp; - if (jj_3R_36()) return true; - } - } - } - return false; - } - - private boolean jj_3R_9() { - if (jj_3R_13()) return true; - if (jj_scan_token(LPAREN)) return true; - if (jj_3R_14()) return true; - if (jj_scan_token(RPAREN)) return true; - return false; - } - - private boolean jj_3R_29() { - if (jj_scan_token(IRI)) return true; - return false; - } - - private boolean jj_3_1() { - if (jj_3R_6()) return true; - return false; - } - - private boolean jj_3R_25() { - if (jj_scan_token(VAR)) return true; - return false; - } - - private boolean jj_3R_24() { - if (jj_3R_28()) return true; - return false; - } - - private boolean jj_3R_23() { - if (jj_3R_22()) return true; - return false; - } - - private boolean jj_3R_19() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_23()) { - jj_scanpos = xsp; - if (jj_3R_24()) { - jj_scanpos = xsp; - if (jj_3R_25()) return true; - } - } - return false; - } - - /** Generated Token Manager. */ - public JavaCCRuleParserTokenManager token_source; - SimpleCharStream jj_input_stream; - /** Current token. */ - public Token token; - /** Next token. */ - public Token jj_nt; - private int jj_ntk; - private Token jj_scanpos, jj_lastpos; - private int jj_la; - private int jj_gen; - final private int[] jj_la1 = new int[17]; - static private int[] jj_la1_0; - static private int[] jj_la1_1; - static { - jj_la1_init_0(); - jj_la1_init_1(); - } - private static void jj_la1_init_0() { - jj_la1_0 = new int[] {0x200,0x100,0x7000000,0x7000000,0x0,0x0,0x7000000,0x0,0x7000000,0x17780000,0x7000,0x20000000,0x20000000,0xc00,0x780000,0x7000000,0x6000000,}; - } - private static void jj_la1_init_1() { - jj_la1_1 = new int[] {0x0,0x0,0x10000000,0x10000000,0x200,0x200,0x10008000,0x200,0x10000000,0x0,0x0,0x200000,0x200000,0x0,0x0,0x0,0x0,}; - } - final private JJCalls[] jj_2_rtns = new JJCalls[1]; - private boolean jj_rescan = false; - private int jj_gc = 0; - - /** Constructor with InputStream. 
*/ - public JavaCCRuleParser(java.io.InputStream stream) { - this(stream, null); - } - /** Constructor with InputStream and supplied encoding */ - public JavaCCRuleParser(java.io.InputStream stream, String encoding) { - try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } - token_source = new JavaCCRuleParserTokenManager(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 17; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); - } - - /** Reinitialise. */ - public void ReInit(java.io.InputStream stream) { - ReInit(stream, null); - } - /** Reinitialise. */ - public void ReInit(java.io.InputStream stream, String encoding) { - try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } - token_source.ReInit(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 17; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); - } - - /** Constructor. */ - public JavaCCRuleParser(java.io.Reader stream) { - jj_input_stream = new SimpleCharStream(stream, 1, 1); - token_source = new JavaCCRuleParserTokenManager(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 17; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); - } - - /** Reinitialise. */ - public void ReInit(java.io.Reader stream) { - jj_input_stream.ReInit(stream, 1, 1); - token_source.ReInit(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 17; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); - } - - /** Constructor with generated Token Manager. */ - public JavaCCRuleParser(JavaCCRuleParserTokenManager tm) { - token_source = tm; - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 17; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); - } - - /** Reinitialise. 
*/ - public void ReInit(JavaCCRuleParserTokenManager tm) { - token_source = tm; - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 17; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); - } - - private Token jj_consume_token(int kind) throws ParseException { - Token oldToken; - if ((oldToken = token).next != null) token = token.next; - else token = token.next = token_source.getNextToken(); - jj_ntk = -1; - if (token.kind == kind) { - jj_gen++; - if (++jj_gc > 100) { - jj_gc = 0; - for (int i = 0; i < jj_2_rtns.length; i++) { - JJCalls c = jj_2_rtns[i]; - while (c != null) { - if (c.gen < jj_gen) c.first = null; - c = c.next; - } - } - } - return token; - } - token = oldToken; - jj_kind = kind; - throw generateParseException(); - } - - static private final class LookaheadSuccess extends java.lang.Error { } - final private LookaheadSuccess jj_ls = new LookaheadSuccess(); - private boolean jj_scan_token(int kind) { - if (jj_scanpos == jj_lastpos) { - jj_la--; - if (jj_scanpos.next == null) { - jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken(); - } else { - jj_lastpos = jj_scanpos = jj_scanpos.next; - } - } else { - jj_scanpos = jj_scanpos.next; - } - if (jj_rescan) { - int i = 0; Token tok = token; - while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; } - if (tok != null) jj_add_error_token(kind, i); - } - if (jj_scanpos.kind != kind) return true; - if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls; - return false; - } - - -/** Get the next Token. */ - final public Token getNextToken() { - if (token.next != null) token = token.next; - else token = token.next = token_source.getNextToken(); - jj_ntk = -1; - jj_gen++; - return token; - } - -/** Get the specific Token. */ - final public Token getToken(int index) { - Token t = token; - for (int i = 0; i < index; i++) { - if (t.next != null) t = t.next; - else t = t.next = token_source.getNextToken(); - } - return t; - } - - private int jj_ntk() { - if ((jj_nt=token.next) == null) - return (jj_ntk = (token.next=token_source.getNextToken()).kind); - else - return (jj_ntk = jj_nt.kind); - } - - private java.util.List jj_expentries = new java.util.ArrayList(); - private int[] jj_expentry; - private int jj_kind = -1; - private int[] jj_lasttokens = new int[100]; - private int jj_endpos; - - private void jj_add_error_token(int kind, int pos) { - if (pos >= 100) return; - if (pos == jj_endpos + 1) { - jj_lasttokens[jj_endpos++] = kind; - } else if (jj_endpos != 0) { - jj_expentry = new int[jj_endpos]; - for (int i = 0; i < jj_endpos; i++) { - jj_expentry[i] = jj_lasttokens[i]; - } - jj_entries_loop: for (java.util.Iterator it = jj_expentries.iterator(); it.hasNext();) { - int[] oldentry = (int[])(it.next()); - if (oldentry.length == jj_expentry.length) { - for (int i = 0; i < jj_expentry.length; i++) { - if (oldentry[i] != jj_expentry[i]) { - continue jj_entries_loop; - } - } - jj_expentries.add(jj_expentry); - break jj_entries_loop; - } - } - if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind; - } - } - - /** Generate ParseException. 
*/ - public ParseException generateParseException() { - jj_expentries.clear(); - boolean[] la1tokens = new boolean[63]; - if (jj_kind >= 0) { - la1tokens[jj_kind] = true; - jj_kind = -1; - } - for (int i = 0; i < 17; i++) { - if (jj_la1[i] == jj_gen) { - for (int j = 0; j < 32; j++) { - if ((jj_la1_0[i] & (1< jj_gen) { - jj_la = p.arg; jj_lastpos = jj_scanpos = p.first; - switch (i) { - case 0: jj_3_1(); break; - } - } - p = p.next; - } while (p != null); - } catch(LookaheadSuccess ls) { } - } - jj_rescan = false; - } - - private void jj_save(int index, int xla) { - JJCalls p = jj_2_rtns[index]; - while (p.gen > jj_gen) { - if (p.next == null) { p = p.next = new JJCalls(); break; } - p = p.next; - } - p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla; - } - - static final class JJCalls { - int gen; - Token first; - int arg; - JJCalls next; - } - -} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java deleted file mode 100644 index 6bea8ccdb..000000000 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParserTokenManager.java +++ /dev/null @@ -1,1520 +0,0 @@ -/* Generated By:JavaCC: Do not edit this line. JavaCCRuleParserTokenManager.java */ -package org.semanticweb.vlog4j.parser.implementation.javacc; - -/*- - * #%L - * vlog4j-parser - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ -import java.util.List; -import java.util.ArrayList; -import org.semanticweb.vlog4j.parser.implementation.RuleParserBase; -import org.semanticweb.vlog4j.parser.implementation.PrologueException; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Constant; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeNegativeLiteral; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveConjunction; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConjunction; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; - -/** Token Manager. */ -public class JavaCCRuleParserTokenManager implements JavaCCRuleParserConstants -{ - - /** Debug output. */ - public java.io.PrintStream debugStream = System.out; - /** Set debug output. 
*/ - public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; } -private int jjStopAtPos(int pos, int kind) -{ - jjmatchedKind = kind; - jjmatchedPos = pos; - return pos + 1; -} -private int jjMoveStringLiteralDfa0_0() -{ - switch(curChar) - { - case 9: - jjmatchedKind = 2; - return jjMoveNfa_0(0, 0); - case 10: - jjmatchedKind = 3; - return jjMoveNfa_0(0, 0); - case 12: - jjmatchedKind = 5; - return jjMoveNfa_0(0, 0); - case 13: - jjmatchedKind = 4; - return jjMoveNfa_0(0, 0); - case 32: - jjmatchedKind = 1; - return jjMoveNfa_0(0, 0); - case 36: - jjmatchedKind = 45; - return jjMoveNfa_0(0, 0); - case 40: - jjmatchedKind = 32; - return jjMoveNfa_0(0, 0); - case 41: - jjmatchedKind = 33; - return jjMoveNfa_0(0, 0); - case 42: - jjmatchedKind = 49; - return jjMoveNfa_0(0, 0); - case 44: - jjmatchedKind = 41; - return jjMoveNfa_0(0, 0); - case 46: - jjmatchedKind = 42; - return jjMoveNfa_0(0, 0); - case 47: - jjmatchedKind = 50; - return jjMoveNfa_0(0, 0); - case 58: - jjmatchedKind = 48; - return jjMoveStringLiteralDfa1_0(0x100000000000L); - case 59: - jjmatchedKind = 40; - return jjMoveNfa_0(0, 0); - case 61: - jjmatchedKind = 43; - return jjMoveNfa_0(0, 0); - case 63: - jjmatchedKind = 46; - return jjMoveNfa_0(0, 0); - case 64: - jjmatchedKind = 54; - return jjMoveStringLiteralDfa1_0(0x300L); - case 70: - return jjMoveStringLiteralDfa1_0(0x800L); - case 84: - return jjMoveStringLiteralDfa1_0(0x400L); - case 91: - jjmatchedKind = 37; - return jjMoveNfa_0(0, 0); - case 92: - jjmatchedKind = 51; - return jjMoveNfa_0(0, 0); - case 93: - jjmatchedKind = 38; - return jjMoveNfa_0(0, 0); - case 94: - return jjMoveStringLiteralDfa1_0(0x20000000000000L); - case 102: - return jjMoveStringLiteralDfa1_0(0x800L); - case 116: - return jjMoveStringLiteralDfa1_0(0x400L); - case 123: - jjmatchedKind = 35; - return jjMoveNfa_0(0, 0); - case 125: - jjmatchedKind = 36; - return jjMoveNfa_0(0, 0); - case 126: - jjmatchedKind = 47; - return jjMoveNfa_0(0, 0); - case 65279: - jjmatchedKind = 52; - return jjMoveNfa_0(0, 0); - default : - return jjMoveNfa_0(0, 0); - } -} -private int jjMoveStringLiteralDfa1_0(long active0) -{ - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - return jjMoveNfa_0(0, 0); - } - switch(curChar) - { - case 45: - if ((active0 & 0x100000000000L) != 0L) - { - jjmatchedKind = 44; - jjmatchedPos = 1; - } - break; - case 65: - return jjMoveStringLiteralDfa2_0(active0, 0x800L); - case 82: - return jjMoveStringLiteralDfa2_0(active0, 0x400L); - case 94: - if ((active0 & 0x20000000000000L) != 0L) - { - jjmatchedKind = 53; - jjmatchedPos = 1; - } - break; - case 97: - return jjMoveStringLiteralDfa2_0(active0, 0x800L); - case 98: - return jjMoveStringLiteralDfa2_0(active0, 0x200L); - case 112: - return jjMoveStringLiteralDfa2_0(active0, 0x100L); - case 114: - return jjMoveStringLiteralDfa2_0(active0, 0x400L); - default : - break; - } - return jjMoveNfa_0(0, 1); -} -private int jjMoveStringLiteralDfa2_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjMoveNfa_0(0, 1); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - return jjMoveNfa_0(0, 1); - } - switch(curChar) - { - case 76: - return jjMoveStringLiteralDfa3_0(active0, 0x800L); - case 85: - return jjMoveStringLiteralDfa3_0(active0, 0x400L); - case 97: - return jjMoveStringLiteralDfa3_0(active0, 0x200L); - case 108: - return jjMoveStringLiteralDfa3_0(active0, 0x800L); - case 114: - return jjMoveStringLiteralDfa3_0(active0, 0x100L); - case 117: - return 
jjMoveStringLiteralDfa3_0(active0, 0x400L); - default : - break; - } - return jjMoveNfa_0(0, 2); -} -private int jjMoveStringLiteralDfa3_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjMoveNfa_0(0, 2); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - return jjMoveNfa_0(0, 2); - } - switch(curChar) - { - case 69: - if ((active0 & 0x400L) != 0L) - { - jjmatchedKind = 10; - jjmatchedPos = 3; - } - break; - case 83: - return jjMoveStringLiteralDfa4_0(active0, 0x800L); - case 101: - if ((active0 & 0x400L) != 0L) - { - jjmatchedKind = 10; - jjmatchedPos = 3; - } - return jjMoveStringLiteralDfa4_0(active0, 0x100L); - case 115: - return jjMoveStringLiteralDfa4_0(active0, 0xa00L); - default : - break; - } - return jjMoveNfa_0(0, 3); -} -private int jjMoveStringLiteralDfa4_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjMoveNfa_0(0, 3); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - return jjMoveNfa_0(0, 3); - } - switch(curChar) - { - case 69: - if ((active0 & 0x800L) != 0L) - { - jjmatchedKind = 11; - jjmatchedPos = 4; - } - break; - case 101: - if ((active0 & 0x200L) != 0L) - { - jjmatchedKind = 9; - jjmatchedPos = 4; - } - else if ((active0 & 0x800L) != 0L) - { - jjmatchedKind = 11; - jjmatchedPos = 4; - } - break; - case 102: - return jjMoveStringLiteralDfa5_0(active0, 0x100L); - default : - break; - } - return jjMoveNfa_0(0, 4); -} -private int jjMoveStringLiteralDfa5_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjMoveNfa_0(0, 4); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - return jjMoveNfa_0(0, 4); - } - switch(curChar) - { - case 105: - return jjMoveStringLiteralDfa6_0(active0, 0x100L); - default : - break; - } - return jjMoveNfa_0(0, 5); -} -private int jjMoveStringLiteralDfa6_0(long old0, long active0) -{ - if (((active0 &= old0)) == 0L) - return jjMoveNfa_0(0, 5); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { - return jjMoveNfa_0(0, 5); - } - switch(curChar) - { - case 120: - if ((active0 & 0x100L) != 0L) - { - jjmatchedKind = 8; - jjmatchedPos = 6; - } - break; - default : - break; - } - return jjMoveNfa_0(0, 6); -} -static final long[] jjbitVec0 = { - 0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL -}; -static final long[] jjbitVec2 = { - 0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL -}; -static final long[] jjbitVec3 = { - 0xfffe7000fffffff6L, 0xffffffffffffffffL, 0xffffffffffffffffL, 0x7e00000000ffffffL -}; -static final long[] jjbitVec4 = { - 0x0L, 0x0L, 0x0L, 0xff7fffffff7fffffL -}; -static final long[] jjbitVec5 = { - 0x0L, 0xbfff000000000000L, 0xffffffffffffffffL, 0xffffffffffffffffL -}; -static final long[] jjbitVec6 = { - 0x3000L, 0xffff000000000000L, 0xffffffffffffffffL, 0xffffffffffffffffL -}; -static final long[] jjbitVec7 = { - 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffL, 0x0L -}; -static final long[] jjbitVec8 = { - 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffL -}; -static final long[] jjbitVec9 = { - 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0x3fffffffffffffffL -}; -static final long[] jjbitVec10 = { - 0x0L, 0x0L, 0x80000000000000L, 0xff7fffffff7fffffL -}; -static final long[] jjbitVec11 = { - 0xffffffffffffffffL, 0xbfffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL -}; -static final long[] jjbitVec12 = { - 0x8000000000003000L, 0xffff000000000001L, 
0xffffffffffffffffL, 0xffffffffffffffffL -}; -private int jjMoveNfa_0(int startState, int curPos) -{ - int strKind = jjmatchedKind; - int strPos = jjmatchedPos; - int seenUpto; - input_stream.backup(seenUpto = curPos + 1); - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { throw new Error("Internal Error"); } - curPos = 0; - int startsAt = 0; - jjnewStateCnt = 114; - int i = 1; - jjstateSet[0] = startState; - int kind = 0x7fffffff; - for (;;) - { - if (++jjround == 0x7fffffff) - ReInitRounds(); - if (curChar < 64) - { - long l = 1L << curChar; - do - { - switch(jjstateSet[--i]) - { - case 0: - if ((0x3ff000000000000L & l) != 0L) - { - if (kind > 12) - kind = 12; - jjCheckNAddStates(0, 7); - } - else if ((0x280000000000L & l) != 0L) - jjCheckNAddStates(8, 12); - else if (curChar == 58) - { - if (kind > 25) - kind = 25; - jjCheckNAdd(110); - } - else if (curChar == 46) - jjCheckNAddTwoStates(83, 85); - else if (curChar == 37) - jjCheckNAddTwoStates(75, 76); - else if (curChar == 40) - jjCheckNAddStates(13, 15); - else if (curChar == 60) - jjCheckNAddTwoStates(42, 43); - else if (curChar == 34) - jjstateSet[jjnewStateCnt++] = 39; - else if (curChar == 39) - jjstateSet[jjnewStateCnt++] = 27; - else if (curChar == 35) - { - if (kind > 7) - kind = 7; - jjCheckNAddStates(16, 18); - } - else if (curChar == 63) - jjstateSet[jjnewStateCnt++] = 50; - if ((0x3ff000000000000L & l) != 0L) - { - if (kind > 60) - kind = 60; - jjCheckNAdd(73); - } - else if (curChar == 34) - jjCheckNAddStates(19, 21); - else if (curChar == 39) - jjCheckNAddStates(22, 24); - break; - case 1: - if ((0xffffffffffffdbffL & l) == 0L) - break; - if (kind > 7) - kind = 7; - jjCheckNAddStates(16, 18); - break; - case 2: - if ((0x2400L & l) != 0L && kind > 7) - kind = 7; - break; - case 3: - if (curChar == 10 && kind > 7) - kind = 7; - break; - case 4: - if (curChar == 13) - jjstateSet[jjnewStateCnt++] = 3; - break; - case 6: - if ((0x8400000000L & l) != 0L && kind > 18) - kind = 18; - break; - case 7: - if (curChar == 39) - jjCheckNAddStates(22, 24); - break; - case 8: - if ((0xffffff7fffffdbffL & l) != 0L) - jjCheckNAddStates(22, 24); - break; - case 10: - if ((0x8400000000L & l) != 0L) - jjCheckNAddStates(22, 24); - break; - case 11: - if (curChar == 39 && kind > 19) - kind = 19; - break; - case 12: - if (curChar == 34) - jjCheckNAddStates(19, 21); - break; - case 13: - if ((0xfffffffbffffdbffL & l) != 0L) - jjCheckNAddStates(19, 21); - break; - case 15: - if ((0x8400000000L & l) != 0L) - jjCheckNAddStates(19, 21); - break; - case 16: - if (curChar == 34 && kind > 20) - kind = 20; - break; - case 17: - if (curChar == 39) - jjCheckNAddStates(25, 28); - break; - case 18: - case 22: - if ((0xffffff7fffffffffL & l) != 0L) - jjCheckNAddStates(25, 28); - break; - case 20: - if ((0x8400000000L & l) != 0L) - jjCheckNAddStates(25, 28); - break; - case 21: - case 24: - if (curChar == 39) - jjCheckNAdd(22); - break; - case 23: - if (curChar == 39) - jjAddStates(29, 30); - break; - case 25: - if (curChar == 39 && kind > 21) - kind = 21; - break; - case 26: - if (curChar == 39) - jjstateSet[jjnewStateCnt++] = 25; - break; - case 27: - if (curChar == 39) - jjstateSet[jjnewStateCnt++] = 17; - break; - case 28: - if (curChar == 39) - jjstateSet[jjnewStateCnt++] = 27; - break; - case 29: - if (curChar == 34) - jjCheckNAddStates(31, 34); - break; - case 30: - case 34: - if ((0xfffffffbffffffffL & l) != 0L) - jjCheckNAddStates(31, 34); - break; - case 32: - if ((0x8400000000L & l) != 0L) - jjCheckNAddStates(31, 34); - 
break; - case 33: - case 36: - if (curChar == 34) - jjCheckNAdd(34); - break; - case 35: - if (curChar == 34) - jjAddStates(35, 36); - break; - case 37: - if (curChar == 34 && kind > 22) - kind = 22; - break; - case 38: - if (curChar == 34) - jjstateSet[jjnewStateCnt++] = 37; - break; - case 39: - if (curChar == 34) - jjstateSet[jjnewStateCnt++] = 29; - break; - case 40: - if (curChar == 34) - jjstateSet[jjnewStateCnt++] = 39; - break; - case 41: - if (curChar == 60) - jjCheckNAddTwoStates(42, 43); - break; - case 42: - if ((0xaffffffa00000000L & l) != 0L) - jjCheckNAddTwoStates(42, 43); - break; - case 43: - if (curChar == 62 && kind > 24) - kind = 24; - break; - case 44: - if (curChar == 58) - jjstateSet[jjnewStateCnt++] = 45; - break; - case 45: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 27) - kind = 27; - jjCheckNAddTwoStates(46, 47); - break; - case 46: - if ((0x3ff600000000000L & l) != 0L) - jjCheckNAddTwoStates(46, 47); - break; - case 47: - if ((0x3ff200000000000L & l) != 0L && kind > 27) - kind = 27; - break; - case 49: - if (curChar == 63) - jjstateSet[jjnewStateCnt++] = 50; - break; - case 50: - case 51: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 28) - kind = 28; - jjCheckNAdd(51); - break; - case 54: - if (curChar == 45) - jjCheckNAdd(55); - break; - case 55: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 29) - kind = 29; - jjCheckNAddTwoStates(54, 55); - break; - case 56: - if (curChar == 40) - jjCheckNAddStates(13, 15); - break; - case 57: - if (curChar == 35) - jjCheckNAddStates(37, 42); - break; - case 58: - if ((0xffffffffffffdbffL & l) != 0L) - jjCheckNAddStates(37, 42); - break; - case 59: - if ((0x2400L & l) != 0L) - jjCheckNAddStates(13, 15); - break; - case 60: - if ((0x100003600L & l) != 0L) - jjCheckNAddStates(13, 15); - break; - case 61: - if (curChar == 41 && kind > 34) - kind = 34; - break; - case 62: - if (curChar == 10) - jjCheckNAddStates(13, 15); - break; - case 63: - if (curChar == 13) - jjstateSet[jjnewStateCnt++] = 62; - break; - case 65: - if (curChar == 35) - jjCheckNAddStates(43, 48); - break; - case 66: - if ((0xffffffffffffdbffL & l) != 0L) - jjCheckNAddStates(43, 48); - break; - case 67: - if ((0x2400L & l) != 0L) - jjCheckNAddStates(49, 51); - break; - case 68: - if ((0x100003600L & l) != 0L) - jjCheckNAddStates(49, 51); - break; - case 70: - if (curChar == 10) - jjCheckNAddStates(49, 51); - break; - case 71: - if (curChar == 13) - jjstateSet[jjnewStateCnt++] = 70; - break; - case 72: - case 73: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 60) - kind = 60; - jjCheckNAdd(73); - break; - case 74: - if (curChar == 37) - jjCheckNAddTwoStates(75, 76); - break; - case 75: - if ((0xfffffffffffffbffL & l) != 0L) - jjCheckNAddTwoStates(75, 76); - break; - case 76: - if (curChar == 10 && kind > 62) - kind = 62; - break; - case 77: - if ((0x280000000000L & l) != 0L) - jjCheckNAddStates(8, 12); - break; - case 78: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 12) - kind = 12; - jjCheckNAdd(78); - break; - case 79: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(79, 80); - break; - case 80: - if (curChar != 46) - break; - if (kind > 13) - kind = 13; - jjCheckNAdd(81); - break; - case 81: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 13) - kind = 13; - jjCheckNAdd(81); - break; - case 82: - if (curChar == 46) - jjCheckNAdd(83); - break; - case 83: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 13) - kind = 13; - jjCheckNAdd(83); - break; - case 84: 
- if (curChar == 46) - jjCheckNAdd(85); - break; - case 85: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(85, 86); - break; - case 87: - if ((0x280000000000L & l) != 0L) - jjCheckNAdd(88); - break; - case 88: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 14) - kind = 14; - jjCheckNAdd(88); - break; - case 89: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddStates(52, 55); - break; - case 90: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(90, 91); - break; - case 91: - if (curChar == 46) - jjCheckNAddTwoStates(92, 93); - break; - case 92: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(92, 93); - break; - case 94: - if ((0x280000000000L & l) != 0L) - jjCheckNAdd(95); - break; - case 95: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 14) - kind = 14; - jjCheckNAdd(95); - break; - case 96: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(96, 97); - break; - case 98: - if ((0x280000000000L & l) != 0L) - jjCheckNAdd(99); - break; - case 99: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 14) - kind = 14; - jjCheckNAdd(99); - break; - case 100: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 12) - kind = 12; - jjCheckNAddStates(0, 7); - break; - case 101: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 23) - kind = 23; - jjCheckNAdd(101); - break; - case 102: - if (curChar == 46) - jjCheckNAddTwoStates(83, 85); - break; - case 104: - if ((0x3ff600000000000L & l) != 0L) - jjAddStates(56, 57); - break; - case 105: - if ((0x3ff200000000000L & l) != 0L) - jjstateSet[jjnewStateCnt++] = 106; - break; - case 106: - if (curChar == 58 && kind > 25) - kind = 25; - break; - case 107: - if ((0x3ff600000000000L & l) != 0L) - jjAddStates(58, 59); - break; - case 108: - if ((0x3ff200000000000L & l) != 0L) - jjstateSet[jjnewStateCnt++] = 109; - break; - case 109: - if (curChar == 58) - jjCheckNAdd(110); - break; - case 110: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 26) - kind = 26; - jjCheckNAddTwoStates(111, 112); - break; - case 111: - if ((0x3ff600000000000L & l) != 0L) - jjCheckNAddTwoStates(111, 112); - break; - case 112: - if ((0x3ff200000000000L & l) != 0L && kind > 26) - kind = 26; - break; - case 113: - if (curChar != 58) - break; - if (kind > 25) - kind = 25; - jjCheckNAdd(110); - break; - default : break; - } - } while(i != startsAt); - } - else if (curChar < 128) - { - long l = 1L << (curChar & 077); - do - { - switch(jjstateSet[--i]) - { - case 0: - if ((0x7fffffe87fffffeL & l) != 0L) - { - if (kind > 60) - kind = 60; - jjCheckNAdd(73); - } - else if (curChar == 91) - jjCheckNAddStates(49, 51); - else if (curChar == 64) - jjCheckNAdd(53); - else if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 6; - if ((0x7fffffe07fffffeL & l) != 0L) - jjCheckNAddStates(60, 65); - else if (curChar == 95) - jjstateSet[jjnewStateCnt++] = 44; - break; - case 1: - if (kind > 7) - kind = 7; - jjAddStates(16, 18); - break; - case 5: - if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 6; - break; - case 6: - if ((0x14404410144044L & l) != 0L && kind > 18) - kind = 18; - break; - case 8: - if ((0xffffffffefffffffL & l) != 0L) - jjCheckNAddStates(22, 24); - break; - case 9: - if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 10; - break; - case 10: - if ((0x14404410144044L & l) != 0L) - jjCheckNAddStates(22, 24); - break; - case 13: - if ((0xffffffffefffffffL & l) != 0L) - jjCheckNAddStates(19, 21); - break; - case 14: - if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 15; - break; 
- case 15: - if ((0x14404410144044L & l) != 0L) - jjCheckNAddStates(19, 21); - break; - case 18: - if ((0xffffffffefffffffL & l) != 0L) - jjCheckNAddStates(25, 28); - break; - case 19: - if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 20; - break; - case 20: - if ((0x14404410144044L & l) != 0L) - jjCheckNAddStates(25, 28); - break; - case 22: - jjCheckNAddStates(25, 28); - break; - case 30: - if ((0xffffffffefffffffL & l) != 0L) - jjCheckNAddStates(31, 34); - break; - case 31: - if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 32; - break; - case 32: - if ((0x14404410144044L & l) != 0L) - jjCheckNAddStates(31, 34); - break; - case 34: - jjCheckNAddStates(31, 34); - break; - case 42: - if ((0xc7fffffeafffffffL & l) != 0L) - jjAddStates(66, 67); - break; - case 45: - if ((0x7fffffe87fffffeL & l) == 0L) - break; - if (kind > 27) - kind = 27; - jjCheckNAddTwoStates(46, 47); - break; - case 46: - if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAddTwoStates(46, 47); - break; - case 47: - if ((0x7fffffe87fffffeL & l) != 0L && kind > 27) - kind = 27; - break; - case 48: - if (curChar == 95) - jjstateSet[jjnewStateCnt++] = 44; - break; - case 50: - case 51: - if ((0x7fffffe87fffffeL & l) == 0L) - break; - if (kind > 28) - kind = 28; - jjCheckNAdd(51); - break; - case 52: - if (curChar == 64) - jjCheckNAdd(53); - break; - case 53: - if ((0x7fffffe07fffffeL & l) == 0L) - break; - if (kind > 29) - kind = 29; - jjCheckNAddTwoStates(53, 54); - break; - case 55: - if ((0x7fffffe07fffffeL & l) == 0L) - break; - if (kind > 29) - kind = 29; - jjCheckNAddTwoStates(54, 55); - break; - case 58: - jjAddStates(37, 42); - break; - case 64: - if (curChar == 91) - jjCheckNAddStates(49, 51); - break; - case 66: - jjCheckNAddStates(43, 48); - break; - case 69: - if (curChar == 93 && kind > 39) - kind = 39; - break; - case 72: - case 73: - if ((0x7fffffe87fffffeL & l) == 0L) - break; - if (kind > 60) - kind = 60; - jjCheckNAdd(73); - break; - case 75: - jjAddStates(68, 69); - break; - case 86: - if ((0x2000000020L & l) != 0L) - jjAddStates(70, 71); - break; - case 93: - if ((0x2000000020L & l) != 0L) - jjAddStates(72, 73); - break; - case 97: - if ((0x2000000020L & l) != 0L) - jjAddStates(74, 75); - break; - case 103: - if ((0x7fffffe07fffffeL & l) != 0L) - jjCheckNAddStates(60, 65); - break; - case 104: - if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAddTwoStates(104, 105); - break; - case 105: - if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAdd(106); - break; - case 107: - if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAddTwoStates(107, 108); - break; - case 108: - if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAdd(109); - break; - case 110: - if ((0x7fffffe87fffffeL & l) == 0L) - break; - if (kind > 26) - kind = 26; - jjCheckNAddTwoStates(111, 112); - break; - case 111: - if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAddTwoStates(111, 112); - break; - case 112: - if ((0x7fffffe87fffffeL & l) != 0L && kind > 26) - kind = 26; - break; - default : break; - } - } while(i != startsAt); - } - else - { - int hiByte = (int)(curChar >> 8); - int i1 = hiByte >> 6; - long l1 = 1L << (hiByte & 077); - int i2 = (curChar & 0xff) >> 6; - long l2 = 1L << (curChar & 077); - do - { - switch(jjstateSet[--i]) - { - case 0: - if (jjCanMove_1(hiByte, i1, i2, l1, l2)) - { - if (kind > 60) - kind = 60; - jjCheckNAdd(73); - } - if (jjCanMove_1(hiByte, i1, i2, l1, l2)) - jjCheckNAddStates(60, 65); - break; - case 1: - if (!jjCanMove_0(hiByte, i1, i2, l1, l2)) - break; - if (kind > 7) - kind = 7; - jjAddStates(16, 18); - break; - case 8: - 
if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(22, 24); - break; - case 13: - if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(19, 21); - break; - case 18: - case 22: - if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjCheckNAddStates(25, 28); - break; - case 30: - case 34: - if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjCheckNAddStates(31, 34); - break; - case 42: - if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(66, 67); - break; - case 45: - if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) - break; - if (kind > 27) - kind = 27; - jjCheckNAddTwoStates(46, 47); - break; - case 46: - if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddTwoStates(46, 47); - break; - case 47: - if (jjCanMove_2(hiByte, i1, i2, l1, l2) && kind > 27) - kind = 27; - break; - case 50: - if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) - break; - if (kind > 28) - kind = 28; - jjCheckNAdd(51); - break; - case 51: - if (!jjCanMove_2(hiByte, i1, i2, l1, l2)) - break; - if (kind > 28) - kind = 28; - jjCheckNAdd(51); - break; - case 58: - if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(37, 42); - break; - case 66: - if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(43, 48); - break; - case 72: - if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) - break; - if (kind > 60) - kind = 60; - jjCheckNAdd(73); - break; - case 73: - if (!jjCanMove_2(hiByte, i1, i2, l1, l2)) - break; - if (kind > 60) - kind = 60; - jjCheckNAdd(73); - break; - case 75: - if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(68, 69); - break; - case 103: - if (jjCanMove_1(hiByte, i1, i2, l1, l2)) - jjCheckNAddStates(60, 65); - break; - case 104: - if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddTwoStates(104, 105); - break; - case 105: - if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAdd(106); - break; - case 107: - if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddTwoStates(107, 108); - break; - case 108: - if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAdd(109); - break; - case 110: - if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) - break; - if (kind > 26) - kind = 26; - jjCheckNAddTwoStates(111, 112); - break; - case 111: - if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddTwoStates(111, 112); - break; - case 112: - if (jjCanMove_2(hiByte, i1, i2, l1, l2) && kind > 26) - kind = 26; - break; - default : break; - } - } while(i != startsAt); - } - if (kind != 0x7fffffff) - { - jjmatchedKind = kind; - jjmatchedPos = curPos; - kind = 0x7fffffff; - } - ++curPos; - if ((i = jjnewStateCnt) == (startsAt = 114 - (jjnewStateCnt = startsAt))) - break; - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { break; } - } - if (jjmatchedPos > strPos) - return curPos; - - int toRet = Math.max(curPos, seenUpto); - - if (curPos < toRet) - for (i = toRet - Math.min(curPos, seenUpto); i-- > 0; ) - try { curChar = input_stream.readChar(); } - catch(java.io.IOException e) { throw new Error("Internal Error : Please send a bug report."); } - - if (jjmatchedPos < strPos) - { - jjmatchedKind = strKind; - jjmatchedPos = strPos; - } - else if (jjmatchedPos == strPos && jjmatchedKind > strKind) - jjmatchedKind = strKind; - - return toRet; -} -static final int[] jjnextStates = { - 78, 79, 80, 90, 91, 96, 97, 101, 78, 79, 82, 84, 89, 57, 60, 61, - 1, 2, 4, 13, 14, 16, 8, 9, 11, 18, 19, 21, 23, 24, 26, 30, - 31, 33, 35, 36, 38, 57, 58, 59, 63, 60, 61, 65, 66, 67, 71, 68, - 69, 65, 68, 69, 90, 91, 96, 97, 104, 105, 107, 108, 104, 105, 106, 107, - 108, 109, 42, 43, 75, 76, 87, 88, 94, 95, 98, 99, -}; -private static final boolean 
jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2) -{ - switch(hiByte) - { - case 0: - return ((jjbitVec2[i2] & l2) != 0L); - default : - if ((jjbitVec0[i1] & l1) != 0L) - return true; - return false; - } -} -private static final boolean jjCanMove_1(int hiByte, int i1, int i2, long l1, long l2) -{ - switch(hiByte) - { - case 0: - return ((jjbitVec4[i2] & l2) != 0L); - case 3: - return ((jjbitVec5[i2] & l2) != 0L); - case 32: - return ((jjbitVec6[i2] & l2) != 0L); - case 33: - return ((jjbitVec7[i2] & l2) != 0L); - case 47: - return ((jjbitVec8[i2] & l2) != 0L); - case 48: - return ((jjbitVec0[i2] & l2) != 0L); - case 255: - return ((jjbitVec9[i2] & l2) != 0L); - default : - if ((jjbitVec3[i1] & l1) != 0L) - return true; - return false; - } -} -private static final boolean jjCanMove_2(int hiByte, int i1, int i2, long l1, long l2) -{ - switch(hiByte) - { - case 0: - return ((jjbitVec10[i2] & l2) != 0L); - case 3: - return ((jjbitVec11[i2] & l2) != 0L); - case 32: - return ((jjbitVec12[i2] & l2) != 0L); - case 33: - return ((jjbitVec7[i2] & l2) != 0L); - case 47: - return ((jjbitVec8[i2] & l2) != 0L); - case 48: - return ((jjbitVec0[i2] & l2) != 0L); - case 255: - return ((jjbitVec9[i2] & l2) != 0L); - default : - if ((jjbitVec3[i1] & l1) != 0L) - return true; - return false; - } -} - -/** Token literal values. */ -public static final String[] jjstrLiteralImages = { -"", null, null, null, null, null, null, null, "\100\160\162\145\146\151\170", -"\100\142\141\163\145", null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, "\50", "\51", null, "\173", -"\175", "\133", "\135", null, "\73", "\54", "\56", "\75", "\72\55", "\44", "\77", -"\176", "\72", "\52", "\57", "\134", "\ufeff", "\136\136", "\100", null, null, null, -null, null, null, null, null, }; - -/** Lexer state names. */ -public static final String[] lexStateNames = { - "DEFAULT", -}; -static final long[] jjtoToken = { - 0x107fffff3ffc7f01L, -}; -static final long[] jjtoSkip = { - 0x40000000000000beL, -}; -static final long[] jjtoSpecial = { - 0x80L, -}; -protected SimpleCharStream input_stream; -private final int[] jjrounds = new int[114]; -private final int[] jjstateSet = new int[228]; -protected char curChar; -/** Constructor. */ -public JavaCCRuleParserTokenManager(SimpleCharStream stream){ - if (SimpleCharStream.staticFlag) - throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer."); - input_stream = stream; -} - -/** Constructor. */ -public JavaCCRuleParserTokenManager(SimpleCharStream stream, int lexState){ - this(stream); - SwitchTo(lexState); -} - -/** Reinitialise parser. */ -public void ReInit(SimpleCharStream stream) -{ - jjmatchedPos = jjnewStateCnt = 0; - curLexState = defaultLexState; - input_stream = stream; - ReInitRounds(); -} -private void ReInitRounds() -{ - int i; - jjround = 0x80000001; - for (i = 114; i-- > 0;) - jjrounds[i] = 0x80000000; -} - -/** Reinitialise parser. */ -public void ReInit(SimpleCharStream stream, int lexState) -{ - ReInit(stream); - SwitchTo(lexState); -} - -/** Switch to specified lex state. */ -public void SwitchTo(int lexState) -{ - if (lexState >= 1 || lexState < 0) - throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". 
State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE); - else - curLexState = lexState; -} - -protected Token jjFillToken() -{ - final Token t; - final String curTokenImage; - final int beginLine; - final int endLine; - final int beginColumn; - final int endColumn; - String im = jjstrLiteralImages[jjmatchedKind]; - curTokenImage = (im == null) ? input_stream.GetImage() : im; - beginLine = input_stream.getBeginLine(); - beginColumn = input_stream.getBeginColumn(); - endLine = input_stream.getEndLine(); - endColumn = input_stream.getEndColumn(); - t = Token.newToken(jjmatchedKind, curTokenImage); - - t.beginLine = beginLine; - t.endLine = endLine; - t.beginColumn = beginColumn; - t.endColumn = endColumn; - - return t; -} - -int curLexState = 0; -int defaultLexState = 0; -int jjnewStateCnt; -int jjround; -int jjmatchedPos; -int jjmatchedKind; - -/** Get the next Token. */ -public Token getNextToken() -{ - Token specialToken = null; - Token matchedToken; - int curPos = 0; - - EOFLoop : - for (;;) - { - try - { - curChar = input_stream.BeginToken(); - } - catch(java.io.IOException e) - { - jjmatchedKind = 0; - matchedToken = jjFillToken(); - matchedToken.specialToken = specialToken; - return matchedToken; - } - - jjmatchedKind = 0x7fffffff; - jjmatchedPos = 0; - curPos = jjMoveStringLiteralDfa0_0(); - if (jjmatchedKind != 0x7fffffff) - { - if (jjmatchedPos + 1 < curPos) - input_stream.backup(curPos - jjmatchedPos - 1); - if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) - { - matchedToken = jjFillToken(); - matchedToken.specialToken = specialToken; - return matchedToken; - } - else - { - if ((jjtoSpecial[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) - { - matchedToken = jjFillToken(); - if (specialToken == null) - specialToken = matchedToken; - else - { - matchedToken.specialToken = specialToken; - specialToken = (specialToken.next = matchedToken); - } - } - continue EOFLoop; - } - } - int error_line = input_stream.getEndLine(); - int error_column = input_stream.getEndColumn(); - String error_after = null; - boolean EOFSeen = false; - try { input_stream.readChar(); input_stream.backup(1); } - catch (java.io.IOException e1) { - EOFSeen = true; - error_after = curPos <= 1 ? "" : input_stream.GetImage(); - if (curChar == '\n' || curChar == '\r') { - error_line++; - error_column = 0; - } - else - error_column++; - } - if (!EOFSeen) { - input_stream.backup(1); - error_after = curPos <= 1 ? 
"" : input_stream.GetImage(); - } - throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR); - } -} - -private void jjCheckNAdd(int state) -{ - if (jjrounds[state] != jjround) - { - jjstateSet[jjnewStateCnt++] = state; - jjrounds[state] = jjround; - } -} -private void jjAddStates(int start, int end) -{ - do { - jjstateSet[jjnewStateCnt++] = jjnextStates[start]; - } while (start++ != end); -} -private void jjCheckNAddTwoStates(int state1, int state2) -{ - jjCheckNAdd(state1); - jjCheckNAdd(state2); -} - -private void jjCheckNAddStates(int start, int end) -{ - do { - jjCheckNAdd(jjnextStates[start]); - } while (start++ != end); -} - -} From b50c9bd89c12681efc763ef500ae258206c85b10 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 17 Jul 2019 20:19:50 +0200 Subject: [PATCH 0021/1003] restore travis.yml --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 07ab6a468..68930cbba 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,7 @@ before_install: - sudo apt-get install gcc-5 -y # - eval “CC=gcc-5 && CXX=g++-5” ## Uncomment line below to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar - #- sh ./build-vlog-library.sh + - sh ./build-vlog-library.sh after_success: - mvn clean cobertura:cobertura coveralls:cobertura From ac2a986ae794f83d55bda777b6a795f1b83bb723 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 17 Jul 2019 20:29:30 +0200 Subject: [PATCH 0022/1003] skipTest --- .travis.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 68930cbba..42f067d5b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,9 @@ before_install: - sudo apt-get install gcc-5 -y # - eval “CC=gcc-5 && CXX=g++-5” ## Uncomment line below to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar - - sh ./build-vlog-library.sh +# - sh ./build-vlog-library.sh + +install: mvn install $OPTIONS -DskipTests=true after_success: - mvn clean cobertura:cobertura coveralls:cobertura From 909d2e36d38bd6a19622feb1a0b1325f9304dd0d Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 18 Jul 2019 14:45:35 +0200 Subject: [PATCH 0023/1003] update vlog4js url --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index efb3e9ad9..6631fa789 100644 --- a/pom.xml +++ b/pom.xml @@ -12,7 +12,7 @@ VLog4j A Java library for working with the VLog rule engine - https://github.com/mkroetzsch/vlog4j + https://github.com/knowsys/vlog4j vlog4j-core From 9433c8e60946a6264ccc97b4f2e99be2b01d771a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 19 Jul 2019 13:39:35 +0200 Subject: [PATCH 0024/1003] update url --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index f15a780ec..5416e23e6 100644 --- a/pom.xml +++ b/pom.xml @@ -12,7 +12,7 @@ VLog4j A Java library for working with the VLog rule engine - https://github.com/mkroetzsch/vlog4j + https://github.com/knowsys/vlog4j vlog4j-core @@ -242,7 +242,7 @@ maven-javadoc-plugin ${maven.javadoc.version} - VLog4j homepage]]> + VLog4j homepage]]> From 8e51700ec1a85cd29ed47e16a0cb18cac76185ec Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 19 Jul 2019 14:00:56 +0200 Subject: [PATCH 0025/1003] fix typo --- .../java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 895f9687b..64424b175 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -146,7 +146,7 @@ public static Reasoner getInstance() { /** * Loaded {@link Rule}s can be re-written internally to an equivalent set of - * rules, according to given {@code ruleRewritingStrategy}. If no staregy is + * rules, according to given {@code ruleRewritingStrategy}. If no strategy is * set, the default value is {@link RuleRewriteStrategy#NONE}, meaning that the * rules will not be re-written. * From f41c3f089c08c840a98d829d00d4fbb2b157a7ee Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 19 Jul 2019 14:48:34 +0200 Subject: [PATCH 0026/1003] fix typos --- .../implementation/GeneratedAnonymousIndividualsTest.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 7e6d2eec6..202ddebd2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -80,7 +80,7 @@ public void testBlanksSkolemChaseNoRuleRewrite() reasoner.reason(); reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - checkTowDistinctBlanksGenerated(reasoner); + checkTwoDistinctBlanksGenerated(reasoner); } } @@ -99,7 +99,7 @@ public void testBlanksSkolemChaseSplitHeadPieces() reasoner.reason(); reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - checkTowDistinctBlanksGenerated(reasoner); + checkTwoDistinctBlanksGenerated(reasoner); } } @@ -116,7 +116,7 @@ public void testBlanksRestrictedChaseNoRuleRewrite() reasoner.reason(); reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - checkTowDistinctBlanksGenerated(reasoner); + checkTwoDistinctBlanksGenerated(reasoner); } } @@ -153,7 +153,7 @@ public void testBlanksRestrictedChaseSplitHeadPieces() } } - private void checkTowDistinctBlanksGenerated(final Reasoner reasoner) + private void checkTwoDistinctBlanksGenerated(final Reasoner reasoner) throws ReasonerStateException, IOException, EdbIdbSeparationException { // expected facts: P(c, _:b1), P(c, _:b2) final List> csvContentIncludeBlanks = FileDataSourceTestUtils.getCSVContent(includeBlanksFilePath); From 245202a64d65e2cf4d0ce88fe46e195339dcfc34 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 19 Jul 2019 15:06:03 +0200 Subject: [PATCH 0027/1003] fix test --- .../GeneratedAnonymousIndividualsTest.java | 22 ++++--------------- 1 file changed, 4 insertions(+), 18 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 7e6d2eec6..d05e7f1a5 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -89,8 +89,8 @@ public void testBlanksSkolemChaseSplitHeadPieces() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - // P(?x) -> P(?x,!y), P(?x,!z) - // after split becomes {{P(?x) -> P(?x,!y), {P(?x)-> P(?x,!z)}} + // {P(?x) -> P(?x,!y), P(?x,!z)} + // after split becomes { {P(?x) -> P(?x,!y,!z)}, {P(?x,!y,!z) ->, P(?x,!y)}, {P(?x,!y,!z) ->, P(?x,!z)} } reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); reasoner.addFacts(fact); @@ -128,28 +128,14 @@ public void testBlanksRestrictedChaseSplitHeadPieces() reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); // {P(?x) -> P(?x,!y), P(?x,!z)} - // after split becomes {{P(?x) -> P(?x,!y), {P(?x)-> P(?x,!z)}} + // after split becomes { {P(?x) -> P(?x,!y,!z)}, {P(?x,!y,!z) ->, P(?x,!y)}, {P(?x,!y,!z) ->, P(?x,!z)} } reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); reasoner.addFacts(fact); reasoner.addRules(existentialRule); reasoner.load(); reasoner.reason(); - reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - // expected fact: P(c, _:b) - final List> csvContentIncludeBlanks = FileDataSourceTestUtils.getCSVContent(includeBlanksFilePath); - assertTrue(csvContentIncludeBlanks.size() == 1); - for (final List queryResult : csvContentIncludeBlanks) { - assertTrue(queryResult.size() == 2); - assertEquals(queryResult.get(0), "c"); - } - final String blank = csvContentIncludeBlanks.get(0).get(1); - assertNotEquals("c", blank); - - reasoner.exportQueryAnswersToCsv(queryAtom, excludeBlanksFilePath, false); - final List> csvContentExcludeBlanks = FileDataSourceTestUtils.getCSVContent(excludeBlanksFilePath); - assertTrue(csvContentExcludeBlanks.isEmpty()); - + checkTowDistinctBlanksGenerated(reasoner); } } From 5ef1225755583787c3bc677b23e41453c5f606d3 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 19 Jul 2019 15:07:36 +0200 Subject: [PATCH 0028/1003] fix typo --- .../implementation/GeneratedAnonymousIndividualsTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 55ba099ef..35138ce02 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -135,7 +135,7 @@ public void testBlanksRestrictedChaseSplitHeadPieces() reasoner.load(); reasoner.reason(); - checkTowDistinctBlanksGenerated(reasoner); + checkTwoDistinctBlanksGenerated(reasoner); } } From e5ad1357c8003c3b231dfa2e4b97968b535f119c Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 19 Jul 2019 15:36:43 +0200 Subject: [PATCH 0029/1003] improve description --- .../GeneratedAnonymousIndividualsTest.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 35138ce02..0a02854da 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -89,8 +89,8 @@ public void testBlanksSkolemChaseSplitHeadPieces() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - // {P(?x) -> P(?x,!y), P(?x,!z)} - // after split becomes { {P(?x) -> P(?x,!y,!z)}, {P(?x,!y,!z) ->, P(?x,!y)}, {P(?x,!y,!z) ->, P(?x,!z)} } + // the rule {P(?x) -> P(?x,!y), P(?x,!z)} after split becomes: + // { {P(?x) -> P(?x,!y,!z)}, {P(?x,?y,?z) ->, P(?x,?y)}, {P(?x,?y,?z) ->, P(?x,?z)} } reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); reasoner.addFacts(fact); @@ -126,11 +126,11 @@ public void testBlanksRestrictedChaseSplitHeadPieces() try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - - // {P(?x) -> P(?x,!y), P(?x,!z)} - // after split becomes { {P(?x) -> P(?x,!y,!z)}, {P(?x,!y,!z) ->, P(?x,!y)}, {P(?x,!y,!z) ->, P(?x,!z)} } + // the rule {P(?x) -> P(?x,!y), P(?x,!z)} after split becomes: + // { {P(?x) -> P(?x,!y,!z)}, {P(?x,?y,?z) ->, P(?x,?y)}, {P(?x,?y,?z) ->, P(?x,?z)} } reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - reasoner.addFacts(fact); + + reasoner.addFacts(fact); reasoner.addRules(existentialRule); reasoner.load(); reasoner.reason(); From 0d847c6b3a8d54cf08b1237252b2fffae914e7f9 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 19 Jul 2019 17:06:55 +0200 Subject: [PATCH 0030/1003] resolve agains @base --- .../vlog4j/examples/SimpleExampleJavaCC.java | 55 +++++++++++++++++++ .../vlog4j/parser/api/Prologue.java | 2 + .../vlog4j/parser/api/RuleParser.java | 1 - .../parser/implementation/LocalPrologue.java | 36 ++++++++---- .../implementation/javacc/JavaCCRuleParser.jj | 2 +- 5 files changed, 83 insertions(+), 13 deletions(-) create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java new file mode 100644 index 000000000..1f88b4175 --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java @@ -0,0 +1,55 @@ +package org.semanticweb.vlog4j.examples; + +import java.io.IOException; + +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.parser.api.RuleParser; +import org.semanticweb.vlog4j.parser.implementation.PrologueException; +import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; + +public class SimpleExampleJavaCC { + public static void main(final String[] args) throws 
ParseException, PrologueException, ReasonerStateException, + EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + + ExamplesUtils.configureLogging(); + + try (final Reasoner reasoner = Reasoner.getInstance()) { + + String rules = ""; + rules += "@base . \n"; + rules += "
() . \n"; rules += "(?x) :-
    (?x) . \n"; + rules += "(?y) . \n"; + + RuleParser rp = new RuleParser(rules); + rp.parse(); + + reasoner.addFacts(rp.getFacts()); + reasoner.addRules(rp.getRules()); + + System.out.println("Rules configured:\n--"); + reasoner.getRules().forEach(System.out::println); + System.out.println("--"); + reasoner.load(); + + System.out.println("Loading completed."); + System.out.println("Starting reasoning (including SPARQL query answering) ..."); + reasoner.reason(); + System.out.println("... reasoning completed.\n--"); + + System.out.println("Number of results in queries:"); + QueryResultIterator answers; + for (PositiveLiteral l : rp.getQueries()) { + answers = reasoner.answerQuery(l, true); + System.out.print(l.toString()); + System.out.println(": " + ExamplesUtils.iteratorSize(answers)); + } + System.out.println("Done."); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java index 4eaf715f0..0bac495fc 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java @@ -34,4 +34,6 @@ public interface Prologue { String resolvePName(String prefixedName) throws PrologueException; + String absolutize(String prefixedName) throws PrologueException; + } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java index b08ab2e80..248fa901b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java @@ -36,7 +36,6 @@ public RuleParser(InputStream stream, String encoding) { super(stream, encoding); } - public RuleParser(String rules) { super(new ByteArrayInputStream(rules.getBytes()), "UTF-8"); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java index df6975556..35a01145f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java @@ -30,7 +30,7 @@ final public class LocalPrologue implements Prologue { - //??? Can I use default logguer + // ??? 
Can I use default logguer final static Logger logger = LoggerFactory.getLogger(LocalPrologue.class.getName()); private static Prologue prologue; @@ -86,28 +86,42 @@ public void setBase(String baseString) throws PrologueException { throw new PrologueException("Base must be ab absolute IRI: " + baseString); } baseURI = newBase; + System.out.println(baseString); + System.out.println(baseURI.toString()); } public String resolvePName(String prefixedName) throws PrologueException { // from the parser we know that prefixedName is of the form: // prefix:something // remember that the prefixes are stored with the colon symbol - // This does not return the surrounding <> + // This does not return the surrounding angle brackes <> int idx = prefixedName.indexOf(":") + 1; String prefix = prefixedName.substring(0, idx); String sufix = prefixedName.substring(idx); - if (prefixes.containsKey(prefix)) { - // if the last character of the fullUri is '#', the resolve method of - // java.net.URI does not work well - String fullUri = prefixes.get(prefix).toString(); - if (fullUri.charAt(fullUri.length() - 1) == '#') - return fullUri + sufix; - // if it is different, then it works - return prefixes.get(prefix).resolve(sufix).toString(); - } + if (prefixes.containsKey(prefix)) + localResolver(prefixes.get(prefix), sufix); throw new PrologueException("@prefix not found: " + prefixedName); } + public String absolutize(String iri) throws PrologueException { + URI relative = URI.create(iri); + if (relative.isAbsolute()) + return iri; + if (baseURI == null) + throw new PrologueException("@base not defined"); + return localResolver(baseURI, iri); + } + + private String localResolver(URI uri, String relative) { + // if the last character of the uri is '#', the resolve method of + // java.net.URI does not work well + String uriString = uri.toString(); + if (uriString.charAt(uriString.length() - 1) == '#') + return uriString + relative; + else + return uri.resolve(relative).toString(); + } + } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index 005f25bd4..da2499fb9 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -259,7 +259,7 @@ String IRI() throws PrologueException: iri = IRIREF() | iri = PrefixedName() ) - { return "<"+iri+">"; } + { return "<"+localPrologue.absolutize(iri)+">"; } } String PrefixedName() throws PrologueException: From 7a5bbc77aefa88cec8f07f211945507105737530 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 19 Jul 2019 17:40:39 +0200 Subject: [PATCH 0031/1003] accept exclamation mark to reprensent variables. 
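With this change, the parser distinguishes universally quantified variables, written with a leading '?', from existentially quantified variables, written with a leading '!'. The sketch below shows, under stated assumptions, how a rule using both kinds of variables could be handed to the parser; the <http://example.org/...> IRIs and predicate names are hypothetical placeholders, not taken from the project's examples.

    // Illustrative sketch only; all IRIs below are hypothetical placeholders.
    String rules = "";
    // ?x is universally quantified, !y is existentially quantified:
    rules += "<http://example.org/hasPart>(?x,!y) :- <http://example.org/thing>(?x) . \n";
    rules += "<http://example.org/thing>(<http://example.org/a>) . \n";

    RuleParser ruleParser = new RuleParser(rules);
    ruleParser.parse(); // may throw ParseException or PrologueException
    // ruleParser.getRules() and ruleParser.getFacts() can then be added to a Reasoner.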
--- .../vlog4j/examples/SimpleExampleJavaCC.java | 2 ++ .../parser/implementation/javacc/JavaCCRuleParser.jj | 12 +++++++++--- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java index 1f88b4175..ef63c1cc1 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java @@ -25,6 +25,8 @@ public static void main(final String[] args) throws ParseException, PrologueExce rules += "
() . \n"; rules += "(?x) :-
    (?x) . \n"; rules += "(?y) . \n"; + rules += "(?x,!y) :- (?x) . \n"; + rules += "(?x,?y) . \n"; RuleParser rp = new RuleParser(rules); rp.parse(); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index da2499fb9..61f9a6a61 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -15,6 +15,8 @@ package org.semanticweb.vlog4j.parser.implementation.javacc; import java.util.List; import java.util.ArrayList; +import java.util.Set; +import java.util.HashSet; import org.semanticweb.vlog4j.parser.implementation.RuleParserBase; import org.semanticweb.vlog4j.parser.implementation.PrologueException; @@ -99,7 +101,9 @@ Rule rule() throws PrologueException: } { head = listOfPositiveLiterals() < ARROW > body = listOfLiterals() < DOT > - { return makeRule(makePositiveConjunction(head), makeConjunction(body)); } + { + return makeRule(makePositiveConjunction(head), makeConjunction(body)); + } } List < PositiveLiteral > listOfPositiveLiterals() throws PrologueException: @@ -183,7 +187,8 @@ Term term() throws PrologueException: { s = IRI() { return makeConstant(s); } | s = RDFLiteral() { return makeConstant(s); } -| t = < VAR > { return makeVariable(t.image.substring(1)); } +| t = < UNIVAR > { return makeVariable(t.image.substring(1)); } +| t = < EXIVAR > { return makeVariable(t.image.substring(1)); } } /** [16] */ @@ -413,7 +418,8 @@ TOKEN : | < PNAME_NS : (< PN_PREFIX >)? ":" > | < PNAME_LN : < PNAME_NS > < PN_LOCAL > > | < BLANK_NODE_LABEL : "_:" < PN_LOCAL > > -| < VAR : "?" < VARORPREDNAME > > +| < UNIVAR : "?" < VARORPREDNAME > > +| < EXIVAR : "!" < VARORPREDNAME > > | < LANGTAG : < AT > (< A2Z >)+ ( From 13620a63e499d8e9f0435aef95e614d1247674a7 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 19 Jul 2019 18:40:48 +0200 Subject: [PATCH 0032/1003] force the use of exclamation mark in existentially quantified variables --- .../data/input/localSyntax/doid-example.txt | 2 +- .../vlog4j/examples/SimpleExampleJavaCC.java | 22 +++- .../implementation/javacc/JavaCCRuleParser.jj | 101 ++++++++++++++---- 3 files changed, 101 insertions(+), 24 deletions(-) diff --git a/vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt b/vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt index 2c46b3418..0d85f3b64 100644 --- a/vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt +++ b/vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt @@ -4,7 +4,7 @@ %this is a comment deathCause(?X, ?Z) :- recentDeathsCause(?X, ?Z) . -deathCause(?X, ?Z) :- recentDeaths(?X) . +deathCause(?X, !Z) :- recentDeaths(?X) . doid(?Iri,?DoidId) :- doidTriple(?Iri,,?DoidId) . hasDoid(?X) :- diseaseId(?X,?DoidId) . 
diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java index ef63c1cc1..009758326 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.examples; +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.io.IOException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -25,7 +45,7 @@ public static void main(final String[] args) throws ParseException, PrologueExce rules += "
() . \n"; rules += "(?x) :-
    (?x) . \n"; rules += "(?y) . \n"; - rules += "(?x,!y) :- (?x) . \n"; + rules += "(?x,?y) :- (?x) . \n"; rules += "(?x,?y) . \n"; RuleParser rp = new RuleParser(rules); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index 61f9a6a61..e34fefc25 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -39,6 +39,13 @@ import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeC public class JavaCCRuleParser extends RuleParserBase { + // we use an int to specify where a variable comes from + // 0 if the variable does not come from a rule + // 1 if the variable comes from the head of a rule + // 2 if the variable comes from the body of a rule + Set bodyVars; + Set headExiVars; + Set headUniVars; } PARSER_END(JavaCCRuleParser) @@ -85,7 +92,7 @@ void statement() throws PrologueException: } { LOOKAHEAD(rule()) r = rule() { listOfRules.add(r);} -| l = positiveLiteral() < DOT > +| l = positiveLiteral(0) < DOT > //not from a rule { if (l.getVariables().isEmpty()) listOfFacts.add(l); @@ -96,76 +103,106 @@ void statement() throws PrologueException: Rule rule() throws PrologueException: { + Rule rule; List < PositiveLiteral > head; List < Literal > body; + bodyVars = new HashSet(); + headExiVars = new HashSet(); + headUniVars = new HashSet(); + Set headExiVarsInterBodyVars; + Set headExiVarsInterheadUniVars; + Set headUniVarsInterBodyVars; } { - head = listOfPositiveLiterals() < ARROW > body = listOfLiterals() < DOT > + // 1 rule's head + // 2 rule's body + head = listOfPositiveLiterals(1) < ARROW > body = listOfLiterals(2) < DOT > { - return makeRule(makePositiveConjunction(head), makeConjunction(body)); + + rule = makeRule(makePositiveConjunction(head), makeConjunction(body)); + // check that the intersection between headExiVars and BodyVars is empty + headExiVarsInterBodyVars = new HashSet(headExiVars); + headExiVarsInterBodyVars.retainAll(bodyVars); + if (!headExiVarsInterBodyVars.isEmpty()) { + throw new ParseException("\nMalformed Rule:\nSome existential variables appers in the body.\nRule: " + rule.toString()); + } + // check that the intersection between headExiVars and headUniVars is empty + headExiVarsInterheadUniVars = new HashSet(headExiVars); + headExiVarsInterheadUniVars.retainAll(headUniVars); + if (!headExiVarsInterBodyVars.isEmpty()) { + throw new ParseException("\nMalformed Rule:\nSome existential variables appers as universal variables in the head.\nRule: " + rule.toString()); + } + // check that bodyVars contains headUniVars + headUniVarsInterBodyVars = new HashSet(headUniVars); + headUniVarsInterBodyVars.retainAll(bodyVars); + if (!headUniVarsInterBodyVars.equals(headUniVars)) { + throw new ParseException("\nMalformed Rule:\nSome universal variables appearing in the head does not apper in the body.\nRule: " + rule.toString()); + } + + return rule; } } -List < PositiveLiteral > listOfPositiveLiterals() throws PrologueException: +List < PositiveLiteral > listOfPositiveLiterals(int itComesFrom) throws PrologueException: { PositiveLiteral l; List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); } { - l = positiveLiteral() { list.add(l); } - ( < COMMA > l = positiveLiteral() { list.add(l); } )* + l = 
positiveLiteral(itComesFrom) { list.add(l); } + ( < COMMA > l = positiveLiteral(itComesFrom) { list.add(l); } )* { return list; } } -List < Literal > listOfLiterals() throws PrologueException: +List < Literal > listOfLiterals(int itComesFrom) throws PrologueException: { Literal l; List < Literal > list = new ArrayList < Literal > (); } { - l = literal() { list.add(l); } - ( < COMMA > l = literal() { list.add(l); } )* + l = literal(itComesFrom) { list.add(l); } + ( < COMMA > l = literal(itComesFrom) { list.add(l); } )* { return list; } } -Literal literal() throws PrologueException: +Literal literal(int itComesFrom) throws PrologueException: { Literal l = null; } { - l = positiveLiteral() { return l; } -| l = negativeLiteral() { return l; } + l = positiveLiteral(itComesFrom) { return l; } +| l = negativeLiteral(itComesFrom) { return l; } } -PositiveLiteral positiveLiteral() throws PrologueException: +PositiveLiteral positiveLiteral(int itComesFrom) throws PrologueException: { Token t; List < Term > terms; String predicateName; } { - predicateName = predicateName() < LPAREN > terms = listOfTerms() < RPAREN > + predicateName = predicateName() < LPAREN > terms = listOfTerms(itComesFrom) < RPAREN > { return makePositiveLiteral(predicateName, terms); } } -NegativeLiteral negativeLiteral() throws PrologueException: +NegativeLiteral negativeLiteral(int itComesFrom) throws PrologueException: { List < Term > terms; String predicateName; } { - < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms() < RPAREN > + < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(itComesFrom) < RPAREN > { return makeNegativeLiteral(predicateName, terms); } } -List < Term > listOfTerms() throws PrologueException: +List < Term > listOfTerms(int itComesFrom) throws PrologueException: { Term t; List < Term > list = new ArrayList < Term > (); } { - t = term() { list.add(t); } - ( < COMMA > t = term() { list.add(t); } )* + t = term(itComesFrom) { list.add(t); } + ( < COMMA > t = term(itComesFrom) { list.add(t); } )* { return list; } } @@ -179,7 +216,11 @@ String predicateName() throws PrologueException: | t = < VARORPREDNAME > { return t.image; } } -Term term() throws PrologueException: +// we use an int to specify where a variable comes from +// 0 if the variable does not come from a rule +// 1 if the variable comes from the head of a rule +// 2 if the variable comes from the body of a rule +Term term(int itComesFrom) throws PrologueException: { String s; Token t; @@ -187,8 +228,24 @@ Term term() throws PrologueException: { s = IRI() { return makeConstant(s); } | s = RDFLiteral() { return makeConstant(s); } -| t = < UNIVAR > { return makeVariable(t.image.substring(1)); } -| t = < EXIVAR > { return makeVariable(t.image.substring(1)); } +| t = < UNIVAR > + { + s = t.image.substring(1); + if (itComesFrom == 1) + headUniVars.add(s); + if (itComesFrom == 2) + bodyVars.add(s); + return makeVariable(t.image.substring(1)); + } +| t = < EXIVAR > + { + s = t.image.substring(1); + if (itComesFrom == 1) + headExiVars.add(s); + if (itComesFrom == 2) + throw new ParseException("Existentialy quantified variables can not appear in the body. 
Line: " + t.beginLine + ", Column: "+ t.beginColumn); + return makeVariable(t.image.substring(1)); + } } /** [16] */ From e7ace8ff2296548de95764ba3f99502713d4a854 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 11:07:13 +0200 Subject: [PATCH 0033/1003] Autom syntax formatting --- .../vlog4j/parser/implementation/LocalPrologue.java | 1 - .../vlog4j/parser/implementation/RuleParserBase.java | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java index 35a01145f..b726b6f6b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java @@ -30,7 +30,6 @@ final public class LocalPrologue implements Prologue { - // ??? Can I use default logguer final static Logger logger = LoggerFactory.getLogger(LocalPrologue.class.getName()); private static Prologue prologue; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java index 9c9df09de..438a65fdb 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java @@ -347,7 +347,8 @@ protected static String unescapePName(String s, int line, int column) throws Par sb.append(ch2); break; default: - throw new ParseException("Illegal prefix name escape: " + ch2+", line:" + line + ", column: " + column); + throw new ParseException( + "Illegal prefix name escape: " + ch2 + ", line:" + line + ", column: " + column); } } return sb.toString(); @@ -365,5 +366,4 @@ public List getQueries() { return listOfQueries; } - } From 9913daabe60257ba22645097de0419703f27244f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 11:14:23 +0200 Subject: [PATCH 0034/1003] Rename parser module into syntax --- pom.xml | 2 +- {vlog4j-parser => vlog4j-syntax}/LICENSE.txt | 0 {vlog4j-parser => vlog4j-syntax}/pom.xml | 4 ++-- .../main/java/org/semanticweb/vlog4j/parser/api/Prologue.java | 0 .../java/org/semanticweb/vlog4j/parser/api/RuleParser.java | 0 .../vlog4j/parser/implementation/LocalPrologue.java | 0 .../vlog4j/parser/implementation/PrologueException.java | 0 .../vlog4j/parser/implementation/RuleParserBase.java | 0 .../vlog4j/parser/implementation/javacc/.gitignore | 0 .../vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj | 0 10 files changed, 3 insertions(+), 3 deletions(-) rename {vlog4j-parser => vlog4j-syntax}/LICENSE.txt (100%) rename {vlog4j-parser => vlog4j-syntax}/pom.xml (97%) rename {vlog4j-parser => vlog4j-syntax}/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java (100%) rename {vlog4j-parser => vlog4j-syntax}/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java (100%) rename {vlog4j-parser => vlog4j-syntax}/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java (100%) rename {vlog4j-parser => vlog4j-syntax}/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java (100%) rename {vlog4j-parser => vlog4j-syntax}/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java (100%) rename {vlog4j-parser => 
vlog4j-syntax}/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore (100%) rename {vlog4j-parser => vlog4j-syntax}/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj (100%) diff --git a/pom.xml b/pom.xml index 13d612650..e84e1c78c 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ vlog4j-examples vlog4j-owlapi vlog4j-graal - vlog4j-parser + vlog4j-syntax diff --git a/vlog4j-parser/LICENSE.txt b/vlog4j-syntax/LICENSE.txt similarity index 100% rename from vlog4j-parser/LICENSE.txt rename to vlog4j-syntax/LICENSE.txt diff --git a/vlog4j-parser/pom.xml b/vlog4j-syntax/pom.xml similarity index 97% rename from vlog4j-parser/pom.xml rename to vlog4j-syntax/pom.xml index 3843c2954..7f827bf3b 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-syntax/pom.xml @@ -11,9 +11,9 @@ 0.4.0-SNAPSHOT - vlog4j-parser + vlog4j-syntax - vlog4j-parser + vlog4j-syntax http://maven.apache.org UTF-8 diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java similarity index 100% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java similarity index 100% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java similarity index 100% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java similarity index 100% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java similarity index 100% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore similarity index 100% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj 
b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj similarity index 100% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj From c66ab677288a0ffc37ecb52acc0c37220f9fad28 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 11:26:14 +0200 Subject: [PATCH 0035/1003] Renamed packages, moved a file --- .../vlog4j/{parser/api => syntax/common}/Prologue.java | 6 +++--- .../implementation => syntax/parser}/LocalPrologue.java | 6 +++--- .../implementation => syntax/parser}/PrologueException.java | 2 +- .../vlog4j/{parser/api => syntax/parser}/RuleParser.java | 2 +- .../implementation => syntax/parser}/RuleParserBase.java | 5 ++--- 5 files changed, 10 insertions(+), 11 deletions(-) rename vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/{parser/api => syntax/common}/Prologue.java (88%) rename vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/{parser/implementation => syntax/parser}/LocalPrologue.java (97%) rename vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/{parser/implementation => syntax/parser}/PrologueException.java (94%) rename vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/{parser/api => syntax/parser}/RuleParser.java (96%) rename vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/{parser/implementation => syntax/parser}/RuleParserBase.java (98%) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/Prologue.java similarity index 88% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/Prologue.java index 0bac495fc..d334e72c0 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/api/Prologue.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/Prologue.java @@ -1,8 +1,8 @@ -package org.semanticweb.vlog4j.parser.api; +package org.semanticweb.vlog4j.syntax.common; /*- * #%L - * vlog4j-parser + * vlog4j-syntax * %% * Copyright (C) 2018 - 2019 VLog4j Developers * %% @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.parser.implementation.PrologueException; +import org.semanticweb.vlog4j.syntax.parser.PrologueException; public interface Prologue { diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrologue.java similarity index 97% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrologue.java index b726b6f6b..814f63535 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/LocalPrologue.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrologue.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser.implementation; +package org.semanticweb.vlog4j.syntax.parser; /*- * #%L @@ -23,11 +23,11 @@ import java.net.URI; import java.util.HashMap; import java.util.Map; + +import org.semanticweb.vlog4j.syntax.common.Prologue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.semanticweb.vlog4j.parser.api.Prologue; - final public class LocalPrologue implements Prologue { 
final static Logger logger = LoggerFactory.getLogger(LocalPrologue.class.getName()); diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/PrologueException.java similarity index 94% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/PrologueException.java index 3ee43243d..d99ddad60 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/PrologueException.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/PrologueException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser.implementation; +package org.semanticweb.vlog4j.syntax.parser; /*- * #%L diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java similarity index 96% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java index 248fa901b..96a042bb9 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/api/RuleParser.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser.api; +package org.semanticweb.vlog4j.syntax.parser; /*- * #%L diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java similarity index 98% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java index 438a65fdb..b41e4eb01 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/RuleParserBase.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser.implementation; +package org.semanticweb.vlog4j.syntax.parser; /*- * #%L @@ -26,9 +26,8 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.parser.api.Prologue; - import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; +import org.semanticweb.vlog4j.syntax.common.Prologue; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; From d495e62a4819ad3e444bc9a4afa3eb77b37380a5 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 13:12:36 +0200 Subject: [PATCH 0036/1003] Some rewriting of prefix management code --- .../syntax/common/PrefixDeclarations.java | 58 ++++++++ .../vlog4j/syntax/common/Prologue.java | 39 ------ .../parser/LocalPrefixDeclarations.java | 100 ++++++++++++++ .../vlog4j/syntax/parser/LocalPrologue.java | 126 ------------------ .../vlog4j/syntax/parser/RuleParserBase.java | 8 +- 5 files changed, 162 insertions(+), 169 deletions(-) create mode 100644 vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java delete mode 100644 vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/Prologue.java create mode 
100644 vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java delete mode 100644 vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrologue.java diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java new file mode 100644 index 000000000..7ac91514a --- /dev/null +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java @@ -0,0 +1,58 @@ +package org.semanticweb.vlog4j.syntax.common; + +/*- + * #%L + * vlog4j-syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.syntax.parser.PrologueException; + +/** + * Registry that manages prefixes and base namespace declarations as used for + * parsing and serialising inputs. + * + * @author Markus Kroetzsch + */ +public interface PrefixDeclarations { + + /** + * Returns the relevant base namespace. This should always return a result, + * possibly using a local default value if no base was declared. + * + * @return string of an absolute base IRI + */ + String getBase(); + + /** + * Sets the base namespace to the given value. This should only be done once, + * and not after the base namespace was assumed to be an implicit default value. + * + * @param base the new base namespace + * @throws PrologueException + */ + void setBase(String base) throws PrologueException; + + String getPrefix(String prefix) throws PrologueException; + + void setPrefix(String prefix, String iri) throws PrologueException; + + String resolvePrefixedName(String prefixedName) throws PrologueException; + + String absolutize(String prefixedName) throws PrologueException; + +} diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/Prologue.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/Prologue.java deleted file mode 100644 index d334e72c0..000000000 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/Prologue.java +++ /dev/null @@ -1,39 +0,0 @@ -package org.semanticweb.vlog4j.syntax.common; - -/*- - * #%L - * vlog4j-syntax - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.semanticweb.vlog4j.syntax.parser.PrologueException; - -public interface Prologue { - - String getBase() throws PrologueException; - - void setBase(String base) throws PrologueException; - - String getPrefix(String prefix) throws PrologueException; - - void setPrefix(String prefix, String iri) throws PrologueException; - - String resolvePName(String prefixedName) throws PrologueException; - - String absolutize(String prefixedName) throws PrologueException; - -} diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java new file mode 100644 index 000000000..fff92454d --- /dev/null +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java @@ -0,0 +1,100 @@ +package org.semanticweb.vlog4j.syntax.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.net.URI; +import java.util.HashMap; +import java.util.Map; + +import org.semanticweb.vlog4j.syntax.common.PrefixDeclarations; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +final public class LocalPrefixDeclarations implements PrefixDeclarations { + + final static Logger logger = LoggerFactory.getLogger(LocalPrefixDeclarations.class.getName()); + + Map prefixes = new HashMap<>(); + final String defaultBaseUri; + String baseUri; + + LocalPrefixDeclarations(String defaultBaseUri) { + this.defaultBaseUri = defaultBaseUri; + } + + public String getBase() { + if (this.baseUri == null) { + this.baseUri = this.defaultBaseUri; + } + return baseUri.toString(); + } + + public String getPrefix(String prefix) throws PrologueException { + if (!prefixes.containsKey(prefix)) + throw new PrologueException("@prefix " + prefix + " not defined"); + return prefixes.get(prefix).toString(); + } + + public void setPrefix(String prefix, String uri) throws PrologueException { + if (prefixes.containsKey(prefix)) { + throw new PrologueException("Prefix " + prefix + " is already defined as <" + prefixes.get(prefix) + + ">. 
It cannot be redefined to mean <" + uri + ">."); + } + + logger.info("Setting new prefix: " + prefix + ", " + uri); + prefixes.put(prefix, uri); + } + + public void setBase(String baseUri) throws PrologueException { + if (this.baseUri != null) + throw new PrologueException( + "Base is already defined as <" + this.baseUri + "> and cannot be re-defined as " + baseUri); + logger.info("Setting base URI: " + baseUri); + this.baseUri = baseUri; + } + + public String resolvePrefixedName(String prefixedName) throws PrologueException { + // from the parser we know that prefixedName is of the form: + // prefix:something + // remember that the prefixes are stored with the colon symbol + // This does not return the surrounding angle brackes <> + + int idx = prefixedName.indexOf(":") + 1; + String prefix = prefixedName.substring(0, idx); + String suffix = prefixedName.substring(idx); + + if (prefixes.containsKey(prefix)) { + return this.prefixes.get(prefix) + suffix; + } else { + throw new PrologueException("Prefix " + prefixedName + " cannot be resolved (not declared yet)."); + } + } + + public String absolutize(String iri) throws PrologueException { + URI relative = URI.create(iri); + if (relative.isAbsolute()) { + return iri; + } else { + return getBase() + iri; + } + } + +} diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrologue.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrologue.java deleted file mode 100644 index 814f63535..000000000 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrologue.java +++ /dev/null @@ -1,126 +0,0 @@ -package org.semanticweb.vlog4j.syntax.parser; - -/*- - * #%L - * vlog4j-parser - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.net.URI; -import java.util.HashMap; -import java.util.Map; - -import org.semanticweb.vlog4j.syntax.common.Prologue; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -final public class LocalPrologue implements Prologue { - - final static Logger logger = LoggerFactory.getLogger(LocalPrologue.class.getName()); - - private static Prologue prologue; - - Map prefixes; - URI baseURI; - - private LocalPrologue() { - prefixes = new HashMap(); - baseURI = null; - } - - public static synchronized Prologue getPrologue() { - // Lazy initialization - if (prologue == null) { - prologue = new LocalPrologue(); - logger.info("Creating new prologue"); - } else { - logger.info("Prologue previously defined"); - } - return prologue; - } - - public String getBase() throws PrologueException { - if (baseURI == null) - throw new PrologueException("@base not defined"); - return baseURI.toString(); - } - - public String getPrefix(String prefix) throws PrologueException { - if (!prefixes.containsKey(prefix)) - throw new PrologueException("@prefix " + prefix + " not defined"); - return prefixes.get(prefix).toString(); - } - - public void setPrefix(String prefix, String uri) throws PrologueException { - if (prefixes.containsKey(prefix)) { - throw new PrologueException("Can not re define @prefix: " + prefix); - } - URI newUri = URI.create(uri); - if (!newUri.isAbsolute()) { - newUri = baseURI.resolve(newUri); - } - logger.info("Setting new prefix: " + prefix + ", " + newUri.toString()); - prefixes.putIfAbsent(prefix, newUri); - } - - public void setBase(String baseString) throws PrologueException { - if (baseURI != null) - throw new PrologueException("Can not re define @base: " + baseURI.toString() + ", " + baseString); - URI newBase = URI.create(baseString); - if (!newBase.isAbsolute()) { - throw new PrologueException("Base must be ab absolute IRI: " + baseString); - } - baseURI = newBase; - System.out.println(baseString); - System.out.println(baseURI.toString()); - } - - public String resolvePName(String prefixedName) throws PrologueException { - // from the parser we know that prefixedName is of the form: - // prefix:something - // remember that the prefixes are stored with the colon symbol - // This does not return the surrounding angle brackes <> - - int idx = prefixedName.indexOf(":") + 1; - String prefix = prefixedName.substring(0, idx); - String sufix = prefixedName.substring(idx); - - if (prefixes.containsKey(prefix)) - localResolver(prefixes.get(prefix), sufix); - throw new PrologueException("@prefix not found: " + prefixedName); - } - - public String absolutize(String iri) throws PrologueException { - URI relative = URI.create(iri); - if (relative.isAbsolute()) - return iri; - if (baseURI == null) - throw new PrologueException("@base not defined"); - return localResolver(baseURI, iri); - } - - private String localResolver(URI uri, String relative) { - // if the last character of the uri is '#', the resolve method of - // java.net.URI does not work well - String uriString = uri.toString(); - if (uriString.charAt(uriString.length() - 1) == '#') - return uriString + relative; - else - return uri.resolve(relative).toString(); - } - -} diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java index b41e4eb01..322ed57ea 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java +++ 
b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java @@ -27,24 +27,24 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; -import org.semanticweb.vlog4j.syntax.common.Prologue; +import org.semanticweb.vlog4j.syntax.common.PrefixDeclarations; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; public class RuleParserBase { - protected Prologue localPrologue; + protected PrefixDeclarations localPrologue; protected List listOfRules; protected List listOfFacts; protected List listOfQueries; public RuleParserBase() { - localPrologue = LocalPrologue.getPrologue(); + localPrologue = new LocalPrefixDeclarations("http://example.org/todo/"); listOfRules = new ArrayList(); listOfFacts = new ArrayList(); listOfQueries = new ArrayList(); } - public RuleParserBase(Prologue prologue, List listOfRules, List listOfFacts, + public RuleParserBase(PrefixDeclarations prologue, List listOfRules, List listOfFacts, List listOfQueries) { this.localPrologue = prologue; this.listOfRules = listOfRules; From e77bfcaf8901118cc2558efe2e7c948ea4f7f616 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 14 Aug 2019 14:37:00 +0200 Subject: [PATCH 0037/1003] fix import bug and rename resolvePName to resolvePrefixedName --- .../parser/implementation/javacc/JavaCCRuleParser.jj | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index e34fefc25..1873e4771 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -18,8 +18,8 @@ import java.util.ArrayList; import java.util.Set; import java.util.HashSet; -import org.semanticweb.vlog4j.parser.implementation.RuleParserBase; -import org.semanticweb.vlog4j.parser.implementation.PrologueException; +import org.semanticweb.vlog4j.syntax.parser.RuleParserBase; +import org.semanticweb.vlog4j.syntax.parser.PrologueException; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Literal; @@ -333,8 +333,8 @@ String PrefixedName() throws PrologueException: t = < PNAME_LN > | t = < PNAME_NS > ) - { return localPrologue.resolvePName(t.image);} - //{ return localPrologue.resolvePName(t.image, t.beginLine, t.beginColumn);} + { return localPrologue.resolvePrefixedName(t.image);} + //{ return localPrologue.resolvePrefixedName(t.image, t.beginLine, t.beginColumn);} } String IRIREF() : From 474f0470f58f75158ea39a48dae0dafc6161978f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 15:32:16 +0200 Subject: [PATCH 0038/1003] RuleParser now wraps JavaCC parser; default base --- vlog4j-examples/pom.xml | 2 +- .../examples/DoidExampleLocalSyntax.java | 24 ++++----- .../vlog4j/examples/SimpleExampleJavaCC.java | 18 ++++--- .../implementation/javacc/JavaCCRuleParser.jj | 44 ++++++++-------- .../PrefixDeclarationException.java} | 6 +-- .../syntax/common/PrefixDeclarations.java | 19 ++++--- .../parser/LocalPrefixDeclarations.java | 37 ++++++++------ .../syntax/parser/ParsingException.java | 50 +++++++++++++++++++ .../vlog4j/syntax/parser/RuleParser.java | 50 
++++++++++++++++--- .../vlog4j/syntax/parser/RuleParserBase.java | 29 ++++------- 10 files changed, 184 insertions(+), 95 deletions(-) rename vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/{parser/PrologueException.java => common/PrefixDeclarationException.java} (82%) create mode 100644 vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/ParsingException.java diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index a36d00215..615bd2636 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -39,7 +39,7 @@ ${project.groupId} - vlog4j-parser + vlog4j-syntax ${project.version} diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java index 03d330284..be15d11e4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java @@ -38,12 +38,8 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; - -import org.semanticweb.vlog4j.parser.api.RuleParser; -import org.semanticweb.vlog4j.parser.implementation.PrologueException; -import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; - - +import org.semanticweb.vlog4j.syntax.parser.ParsingException; +import org.semanticweb.vlog4j.syntax.parser.RuleParser; /** * This example reasons about human diseases, based on information from the @@ -57,15 +53,15 @@ */ public class DoidExampleLocalSyntax { - public static void main(final String[] args) throws ReasonerStateException, IOException, EdbIdbSeparationException, - IncompatiblePredicateArityException, ParseException, PrologueException { + public static void main(final String[] args) + throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { ExamplesUtils.configureLogging(); final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER+"vlog.log"); + reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); reasoner.setLogLevel(LogLevel.DEBUG); /* Configure RDF data source */ @@ -96,9 +92,13 @@ public static void main(final String[] args) throws ReasonerStateException, IOEx final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); reasoner.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); - RuleParser rp = new RuleParser( - new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/localSyntax/doid-example.txt")); - rp.parse(); + RuleParser rp = new RuleParser(); + try { + rp.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/localSyntax/doid-example.txt")); + } catch (ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } reasoner.addRules(rp.getRules()); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java index 009758326..d34dd2170 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java +++ 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java @@ -28,13 +28,12 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; -import org.semanticweb.vlog4j.parser.api.RuleParser; -import org.semanticweb.vlog4j.parser.implementation.PrologueException; -import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; +import org.semanticweb.vlog4j.syntax.parser.ParsingException; +import org.semanticweb.vlog4j.syntax.parser.RuleParser; public class SimpleExampleJavaCC { - public static void main(final String[] args) throws ParseException, PrologueException, ReasonerStateException, - EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + public static void main(final String[] args) + throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { ExamplesUtils.configureLogging(); @@ -48,8 +47,13 @@ public static void main(final String[] args) throws ParseException, PrologueExce rules += "(?x,?y) :- (?x) . \n"; rules += "(?x,?y) . \n"; - RuleParser rp = new RuleParser(rules); - rp.parse(); + RuleParser rp = new RuleParser(); + try { + rp.parse(rules); + } catch (ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } reasoner.addFacts(rp.getFacts()); reasoner.addRules(rp.getRules()); diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index 1873e4771..cc17976b9 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -19,7 +19,7 @@ import java.util.Set; import java.util.HashSet; import org.semanticweb.vlog4j.syntax.parser.RuleParserBase; -import org.semanticweb.vlog4j.syntax.parser.PrologueException; +import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Literal; @@ -51,7 +51,7 @@ public class JavaCCRuleParser extends RuleParserBase PARSER_END(JavaCCRuleParser) -void parse() throws PrologueException: +void parse() throws PrefixDeclarationException: { } { @@ -61,18 +61,18 @@ void parse() throws PrologueException: < EOF > } -void base() throws PrologueException: +void base() throws PrefixDeclarationException: { String iriString; } { < BASE > iriString = IRIREF() < DOT > { - localPrologue.setBase(iriString); + prefixDeclarations.setBase(iriString); } } -void prefix() throws PrologueException: +void prefix() throws PrefixDeclarationException: { Token t; String iriString; @@ -81,11 +81,11 @@ void prefix() throws PrologueException: < PREFIX > t = < PNAME_NS > iriString = IRIREF() < DOT > { //note that prefix includes the colon (:) - localPrologue.setPrefix(t.image, iriString); + prefixDeclarations.setPrefix(t.image, iriString); } } -void statement() throws PrologueException: +void statement() throws PrefixDeclarationException: { Rule r; PositiveLiteral l; @@ -101,7 +101,7 @@ void statement() throws PrologueException: } } -Rule rule() throws PrologueException: +Rule rule() throws PrefixDeclarationException: { Rule 
rule; List < PositiveLiteral > head; @@ -143,7 +143,7 @@ Rule rule() throws PrologueException: } } -List < PositiveLiteral > listOfPositiveLiterals(int itComesFrom) throws PrologueException: +List < PositiveLiteral > listOfPositiveLiterals(int itComesFrom) throws PrefixDeclarationException: { PositiveLiteral l; List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); @@ -154,7 +154,7 @@ List < PositiveLiteral > listOfPositiveLiterals(int itComesFrom) throws Prologue { return list; } } -List < Literal > listOfLiterals(int itComesFrom) throws PrologueException: +List < Literal > listOfLiterals(int itComesFrom) throws PrefixDeclarationException: { Literal l; List < Literal > list = new ArrayList < Literal > (); @@ -165,7 +165,7 @@ List < Literal > listOfLiterals(int itComesFrom) throws PrologueException: { return list; } } -Literal literal(int itComesFrom) throws PrologueException: +Literal literal(int itComesFrom) throws PrefixDeclarationException: { Literal l = null; } @@ -174,7 +174,7 @@ Literal literal(int itComesFrom) throws PrologueException: | l = negativeLiteral(itComesFrom) { return l; } } -PositiveLiteral positiveLiteral(int itComesFrom) throws PrologueException: +PositiveLiteral positiveLiteral(int itComesFrom) throws PrefixDeclarationException: { Token t; List < Term > terms; @@ -185,7 +185,7 @@ PositiveLiteral positiveLiteral(int itComesFrom) throws PrologueException: { return makePositiveLiteral(predicateName, terms); } } -NegativeLiteral negativeLiteral(int itComesFrom) throws PrologueException: +NegativeLiteral negativeLiteral(int itComesFrom) throws PrefixDeclarationException: { List < Term > terms; String predicateName; @@ -195,7 +195,7 @@ NegativeLiteral negativeLiteral(int itComesFrom) throws PrologueException: { return makeNegativeLiteral(predicateName, terms); } } -List < Term > listOfTerms(int itComesFrom) throws PrologueException: +List < Term > listOfTerms(int itComesFrom) throws PrefixDeclarationException: { Term t; List < Term > list = new ArrayList < Term > (); @@ -206,7 +206,7 @@ List < Term > listOfTerms(int itComesFrom) throws PrologueException: { return list; } } -String predicateName() throws PrologueException: +String predicateName() throws PrefixDeclarationException: { String s; Token t; @@ -220,7 +220,7 @@ String predicateName() throws PrologueException: // 0 if the variable does not come from a rule // 1 if the variable comes from the head of a rule // 2 if the variable comes from the body of a rule -Term term(int itComesFrom) throws PrologueException: +Term term(int itComesFrom) throws PrefixDeclarationException: { String s; Token t; @@ -259,7 +259,7 @@ Constant NumericLiteral() : | t = < DOUBLE > { return createLiteralDouble(t.image); } } -String RDFLiteral() throws PrologueException: +String RDFLiteral() throws PrefixDeclarationException: { Token t; String lex = null; @@ -312,7 +312,7 @@ String String(): } } -String IRI() throws PrologueException: +String IRI() throws PrefixDeclarationException: { String iri; } @@ -321,10 +321,10 @@ String IRI() throws PrologueException: iri = IRIREF() | iri = PrefixedName() ) - { return "<"+localPrologue.absolutize(iri)+">"; } + { return "<"+prefixDeclarations.absolutize(iri)+">"; } } -String PrefixedName() throws PrologueException: +String PrefixedName() throws PrefixDeclarationException: { Token t; } @@ -333,8 +333,8 @@ String PrefixedName() throws PrologueException: t = < PNAME_LN > | t = < PNAME_NS > ) - { return localPrologue.resolvePrefixedName(t.image);} - //{ return 
localPrologue.resolvePrefixedName(t.image, t.beginLine, t.beginColumn);} + { return prefixDeclarations.resolvePrefixedName(t.image);} + //{ return prefixDeclarations.resolvePrefixedName(t.image, t.beginLine, t.beginColumn);} } String IRIREF() : diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/PrologueException.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarationException.java similarity index 82% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/PrologueException.java rename to vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarationException.java index d99ddad60..76b8a7d21 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/PrologueException.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarationException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.syntax.common; /*- * #%L @@ -20,13 +20,13 @@ * #L% */ -public class PrologueException extends Exception { +public class PrefixDeclarationException extends Exception { /** * */ private static final long serialVersionUID = 1L; - public PrologueException(String errorMessage) { + public PrefixDeclarationException(String errorMessage) { super(errorMessage); } } diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java index 7ac91514a..266f604cc 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java @@ -20,8 +20,6 @@ * #L% */ -import org.semanticweb.vlog4j.syntax.parser.PrologueException; - /** * Registry that manages prefixes and base namespace declarations as used for * parsing and serialising inputs. @@ -30,6 +28,11 @@ */ public interface PrefixDeclarations { + /** + * Default base IRI to be assumed if no base is given. + */ + static public String DEFAULT_BASE = "http://localhost/LocalHostBase/"; + /** * Returns the relevant base namespace. This should always return a result, * possibly using a local default value if no base was declared. @@ -43,16 +46,16 @@ public interface PrefixDeclarations { * and not after the base namespace was assumed to be an implicit default value. 
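
For illustration only (not part of the patch): a minimal sketch of how client code might exercise this contract, assuming the package layout used in this series (PrefixDeclarations and PrefixDeclarationException under syntax.common, LocalPrefixDeclarations under syntax.parser) and that prefixes are registered including their trailing colon, as the parser's grammar comment indicates; the sketch class name is made up.

```
import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException;
import org.semanticweb.vlog4j.syntax.common.PrefixDeclarations;
import org.semanticweb.vlog4j.syntax.parser.LocalPrefixDeclarations;

public class PrefixDeclarationsSketch {
    public static void main(String[] args) {
        final PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations();
        try {
            prefixDeclarations.setBase("http://example.org/");
            prefixDeclarations.setPrefix("ex:", "http://example.org/vocab/");
            // Reading back what was declared:
            System.out.println(prefixDeclarations.getBase());
            System.out.println(prefixDeclarations.getPrefix("ex:"));
            // Re-declaring the base (or an existing prefix) is rejected:
            prefixDeclarations.setBase("http://other.example.org/");
        } catch (PrefixDeclarationException e) {
            System.out.println("Conflicting declaration: " + e.getMessage());
        }
    }
}
```
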
* * @param base the new base namespace - * @throws PrologueException + * @throws PrefixDeclarationException if base was already defined */ - void setBase(String base) throws PrologueException; + void setBase(String base) throws PrefixDeclarationException; - String getPrefix(String prefix) throws PrologueException; + String getPrefix(String prefix) throws PrefixDeclarationException; - void setPrefix(String prefix, String iri) throws PrologueException; + void setPrefix(String prefix, String iri) throws PrefixDeclarationException; - String resolvePrefixedName(String prefixedName) throws PrologueException; + String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException; - String absolutize(String prefixedName) throws PrologueException; + String absolutize(String prefixedName) throws PrefixDeclarationException; } diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java index fff92454d..6cd2ab9a4 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java @@ -25,37 +25,42 @@ import java.util.Map; import org.semanticweb.vlog4j.syntax.common.PrefixDeclarations; +import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * Implementation of {@link PrefixDeclarations} that is used when parsing data + * from a single source. In this case, attempts to re-declare prefixes or the + * base IRI will lead to errors. + * + * @author Markus Kroetzsch + * + */ final public class LocalPrefixDeclarations implements PrefixDeclarations { final static Logger logger = LoggerFactory.getLogger(LocalPrefixDeclarations.class.getName()); Map prefixes = new HashMap<>(); - final String defaultBaseUri; String baseUri; - LocalPrefixDeclarations(String defaultBaseUri) { - this.defaultBaseUri = defaultBaseUri; - } - public String getBase() { if (this.baseUri == null) { - this.baseUri = this.defaultBaseUri; + this.baseUri = PrefixDeclarations.DEFAULT_BASE; } return baseUri.toString(); } - public String getPrefix(String prefix) throws PrologueException { - if (!prefixes.containsKey(prefix)) - throw new PrologueException("@prefix " + prefix + " not defined"); + public String getPrefix(String prefix) throws PrefixDeclarationException { + if (!prefixes.containsKey(prefix)) { + throw new PrefixDeclarationException("@prefix " + prefix + " not defined"); + } return prefixes.get(prefix).toString(); } - public void setPrefix(String prefix, String uri) throws PrologueException { + public void setPrefix(String prefix, String uri) throws PrefixDeclarationException { if (prefixes.containsKey(prefix)) { - throw new PrologueException("Prefix " + prefix + " is already defined as <" + prefixes.get(prefix) + throw new PrefixDeclarationException("Prefix " + prefix + " is already defined as <" + prefixes.get(prefix) + ">. 
It cannot be redefined to mean <" + uri + ">."); } @@ -63,15 +68,15 @@ public void setPrefix(String prefix, String uri) throws PrologueException { prefixes.put(prefix, uri); } - public void setBase(String baseUri) throws PrologueException { + public void setBase(String baseUri) throws PrefixDeclarationException { if (this.baseUri != null) - throw new PrologueException( + throw new PrefixDeclarationException( "Base is already defined as <" + this.baseUri + "> and cannot be re-defined as " + baseUri); logger.info("Setting base URI: " + baseUri); this.baseUri = baseUri; } - public String resolvePrefixedName(String prefixedName) throws PrologueException { + public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { // from the parser we know that prefixedName is of the form: // prefix:something // remember that the prefixes are stored with the colon symbol @@ -84,11 +89,11 @@ public String resolvePrefixedName(String prefixedName) throws PrologueException if (prefixes.containsKey(prefix)) { return this.prefixes.get(prefix) + suffix; } else { - throw new PrologueException("Prefix " + prefixedName + " cannot be resolved (not declared yet)."); + throw new PrefixDeclarationException("Prefix " + prefixedName + " cannot be resolved (not declared yet)."); } } - public String absolutize(String iri) throws PrologueException { + public String absolutize(String iri) throws PrefixDeclarationException { URI relative = URI.create(iri); if (relative.isAbsolute()) { return iri; diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/ParsingException.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/ParsingException.java new file mode 100644 index 000000000..72d76d0fe --- /dev/null +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/ParsingException.java @@ -0,0 +1,50 @@ +package org.semanticweb.vlog4j.syntax.parser; + +/*- + * #%L + * vlog4j-syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public class ParsingException extends Exception { + + /** + * + */ + private static final long serialVersionUID = 2849123381757026724L; + + public ParsingException() { + super(); + } + + public ParsingException(String message) { + super(message); + } + + public ParsingException(Throwable cause) { + super(cause); + } + + public ParsingException(String message, Throwable cause) { + super(message, cause); + } + + public ParsingException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } + +} diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java index 96a042bb9..087fe19c0 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java @@ -22,21 +22,57 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; +import java.util.List; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.parser.implementation.javacc.JavaCCRuleParser; +import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; +import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException; +/** + * Class to access VLog parsing functionality. + * + * @FIXME Support parsing from multiple files (into one KB). + * + * @author Markus Kroetzsch + * + */ +public class RuleParser { -public class RuleParser extends JavaCCRuleParser { + JavaCCRuleParser parser; - public RuleParser(InputStream stream) { - super(stream, "UTF-8"); + public void parse(InputStream stream, String encoding) throws ParsingException { + parser = new JavaCCRuleParser(stream, encoding); + doParse(); } - public RuleParser(InputStream stream, String encoding) { - super(stream, encoding); + public void parse(InputStream stream) throws ParsingException { + parse(stream, "UTF-8"); + } + + public void parse(String input) throws ParsingException { + InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + parse(inputStream, "UTF-8"); + } + + void doParse() throws ParsingException { + try { + parser.parse(); + } catch (ParseException | PrefixDeclarationException e) { + throw new ParsingException(e.getMessage(), e); + } + } + + public List getRules() { + return parser.getRules(); + } + + public List getQueries() { + return parser.getQueries(); } - public RuleParser(String rules) { - super(new ByteArrayInputStream(rules.getBytes()), "UTF-8"); + public List getFacts() { + return parser.getFacts(); } } diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java index 322ed57ea..70a10fca6 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java @@ -31,26 +31,17 @@ import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; +/** + * Basic methods used in the JavaCC-generated parser. 
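
Taken together, the RuleParser facade shown just above can be used as follows. This is a minimal sketch, not part of the patch: the parse, getRules, getFacts and getQueries methods are taken from the diff, while the parameterized List return types, the sketch class name, and the example input (which follows the test cases added later in this series) are assumptions.

```
import java.util.List;

import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.syntax.parser.ParsingException;
import org.semanticweb.vlog4j.syntax.parser.RuleParser;

public class RuleParserUsageSketch {
    public static void main(String[] args) {
        final String input = "@prefix ex: <http://example.org/> . " // prefix declaration
                + "ex:q(?X) :- ex:p(?X) . " // a rule
                + "ex:p(ex:c) . "; // a fact (no variables)
        final RuleParser ruleParser = new RuleParser();
        try {
            ruleParser.parse(input);
        } catch (ParsingException e) {
            System.out.println("Failed to parse rules: " + e.getMessage());
            return;
        }
        final List<Rule> rules = ruleParser.getRules();
        final List<PositiveLiteral> facts = ruleParser.getFacts();
        System.out.println("Parsed " + rules.size() + " rule(s) and " + facts.size() + " fact(s).");
    }
}
```

As the statement() production distinguishes them, variable-free atoms end up in getFacts(), while atoms that contain variables are collected by getQueries().
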
+ * + * @author Markus Kroetzsch + * + */ public class RuleParserBase { - protected PrefixDeclarations localPrologue; - protected List listOfRules; - protected List listOfFacts; - protected List listOfQueries; - - public RuleParserBase() { - localPrologue = new LocalPrefixDeclarations("http://example.org/todo/"); - listOfRules = new ArrayList(); - listOfFacts = new ArrayList(); - listOfQueries = new ArrayList(); - } - - public RuleParserBase(PrefixDeclarations prologue, List listOfRules, List listOfFacts, - List listOfQueries) { - this.localPrologue = prologue; - this.listOfRules = listOfRules; - this.listOfFacts = listOfFacts; - this.listOfQueries = listOfQueries; - } + final protected PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); + final protected List listOfRules = new ArrayList<>(); + final protected List listOfFacts = new ArrayList<>(); + final protected List listOfQueries = new ArrayList<>(); protected Constant createLiteralInteger(String lexicalForm) { // this method should be eliminated From fa1b2478c66072a0e10286f5a4b2a0d80258a60e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 15:56:23 +0200 Subject: [PATCH 0039/1003] Remove dependency on JUnit 3 --- vlog4j-syntax/pom.xml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/vlog4j-syntax/pom.xml b/vlog4j-syntax/pom.xml index 7f827bf3b..3e93770a6 100644 --- a/vlog4j-syntax/pom.xml +++ b/vlog4j-syntax/pom.xml @@ -13,7 +13,7 @@ vlog4j-syntax - vlog4j-syntax + VLog4j Syntax http://maven.apache.org UTF-8 @@ -24,12 +24,6 @@ vlog4j-core ${project.version} - - junit - junit - 3.8.1 - test - From da705f4cf7e3bdbac4775a978321f82362b0dd73 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 16:29:32 +0200 Subject: [PATCH 0040/1003] Test for desired parsing behaviour --- .../vlog4j/syntax/parser/RuleParserTest.java | 64 +++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java new file mode 100644 index 000000000..e2a832705 --- /dev/null +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -0,0 +1,64 @@ +package org.semanticweb.vlog4j.syntax.parser; + +import static org.junit.Assert.*; + +import java.util.Arrays; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; + +public class RuleParserTest { + + final Variable x = Expressions.makeVariable("X"); + final Variable y = Expressions.makeVariable("Y"); + final Variable z = Expressions.makeVariable("Z"); + final Constant c = Expressions.makeConstant("http://example.org/c"); + final Constant d = Expressions.makeConstant("http://example.org/d"); + final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); + final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); + final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", x, y); + final 
PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); + final PositiveLiteral atom5 = Expressions.makePositiveLiteral("http://example.org/s", c); + final Conjunction body = Expressions.makeConjunction(atom1, atom2); + final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); + final Rule rule = Expressions.makeRule(head, body); + + @Test + public void testExplicitIri() throws ParsingException { + String input = "() ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); + } + + @Test + public void testPrefixResolution() throws ParsingException { + String input = "@prefix ex: . ex:s(ex:c) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); + } + + @Test + public void testBaseRelativeResolution() throws ParsingException { + String input = "@base . () ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); + } + + @Test + public void testBaseResolution() throws ParsingException { + String input = "@base . s(c) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); + } + +} From d35b6137b3a5de110804c75f23c38eb79f110c3e Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 14 Aug 2019 17:04:11 +0200 Subject: [PATCH 0041/1003] fix parser behaviour --- .../parser/implementation/javacc/JavaCCRuleParser.jj | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index cc17976b9..f1485aa0c 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -213,7 +213,7 @@ String predicateName() throws PrefixDeclarationException: } { s = IRI() { return s; } -| t = < VARORPREDNAME > { return t.image; } +| t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } } // we use an int to specify where a variable comes from @@ -246,6 +246,7 @@ Term term(int itComesFrom) throws PrefixDeclarationException: throw new ParseException("Existentialy quantified variables can not appear in the body. 
Line: " + t.beginLine + ", Column: "+ t.beginColumn); return makeVariable(t.image.substring(1)); } +| t = < VARORPREDNAME > { return makeConstant(prefixDeclarations.absolutize(t.image));} } /** [16] */ @@ -321,7 +322,9 @@ String IRI() throws PrefixDeclarationException: iri = IRIREF() | iri = PrefixedName() ) - { return "<"+prefixDeclarations.absolutize(iri)+">"; } + { return prefixDeclarations.absolutize(iri); + //return "<"+prefixDeclarations.absolutize(iri)+">"; + } } String PrefixedName() throws PrefixDeclarationException: From b4d63b26a299626ad7943b682bea7dc78a43ffac Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 17:52:19 +0200 Subject: [PATCH 0042/1003] Support relative IRIs if no base is declared --- .../syntax/common/PrefixDeclarations.java | 5 --- .../parser/LocalPrefixDeclarations.java | 2 +- .../vlog4j/syntax/parser/RuleParserTest.java | 33 +++++++++++++++++-- 3 files changed, 32 insertions(+), 8 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java index 266f604cc..c4ecd8335 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java @@ -28,11 +28,6 @@ */ public interface PrefixDeclarations { - /** - * Default base IRI to be assumed if no base is given. - */ - static public String DEFAULT_BASE = "http://localhost/LocalHostBase/"; - /** * Returns the relevant base namespace. This should always return a result, * possibly using a local default value if no base was declared. diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java index 6cd2ab9a4..b6e996c63 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java @@ -46,7 +46,7 @@ final public class LocalPrefixDeclarations implements PrefixDeclarations { public String getBase() { if (this.baseUri == null) { - this.baseUri = PrefixDeclarations.DEFAULT_BASE; + this.baseUri = ""; // empty string encodes: "no base" (use relative IRIs) } return baseUri.toString(); } diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index e2a832705..2b57d0585 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.syntax.parser; +/*- + * #%L + * VLog4j Syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.*; import java.util.Arrays; @@ -36,7 +56,7 @@ public void testExplicitIri() throws ParsingException { ruleParser.parse(input); assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); } - + @Test public void testPrefixResolution() throws ParsingException { String input = "@prefix ex: . ex:s(ex:c) ."; @@ -52,7 +72,7 @@ public void testBaseRelativeResolution() throws ParsingException { ruleParser.parse(input); assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); } - + @Test public void testBaseResolution() throws ParsingException { String input = "@base . s(c) ."; @@ -61,4 +81,13 @@ public void testBaseResolution() throws ParsingException { assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); } + @Test + public void tesNoBaseRelativeIri() throws ParsingException { + PositiveLiteral atom = Expressions.makePositiveLiteral("s", Expressions.makeConstant("c")); + String input = "s(c) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(atom), ruleParser.getFacts()); + } + } From 9139fc1406af37c128d752257e912ef2f5e1788b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 18:46:31 +0200 Subject: [PATCH 0043/1003] typo, style --- .../org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 2b57d0585..e07b49c00 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -82,11 +82,11 @@ public void testBaseResolution() throws ParsingException { } @Test - public void tesNoBaseRelativeIri() throws ParsingException { - PositiveLiteral atom = Expressions.makePositiveLiteral("s", Expressions.makeConstant("c")); + public void testNoBaseRelativeIri() throws ParsingException { String input = "s(c) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); + PositiveLiteral atom = Expressions.makePositiveLiteral("s", Expressions.makeConstant("c")); assertEquals(Arrays.asList(atom), ruleParser.getFacts()); } From eb65cb07b1152d6a04cd7771d85cd9b6680c9665 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 19:03:14 +0200 Subject: [PATCH 0044/1003] simplify code --- .../vlog4j/syntax/parser/LocalPrefixDeclarations.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java index b6e996c63..425c6469c 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java @@ -53,7 +53,7 @@ public String getBase() { public String getPrefix(String prefix) throws PrefixDeclarationException { if (!prefixes.containsKey(prefix)) { - throw new PrefixDeclarationException("@prefix " + prefix + " not defined"); + throw new PrefixDeclarationException("Prefix " + prefix + " cannot be resolved (not declared yet)."); } return prefixes.get(prefix).toString(); } @@ 
-86,11 +86,7 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE String prefix = prefixedName.substring(0, idx); String suffix = prefixedName.substring(idx); - if (prefixes.containsKey(prefix)) { - return this.prefixes.get(prefix) + suffix; - } else { - throw new PrefixDeclarationException("Prefix " + prefixedName + " cannot be resolved (not declared yet)."); - } + return getPrefix(prefix) + suffix; } public String absolutize(String iri) throws PrefixDeclarationException { From b3d8013d7674a365c62893475046e8f448ee6d00 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 19:03:26 +0200 Subject: [PATCH 0045/1003] additional tests --- .../vlog4j/syntax/parser/RuleParserTest.java | 39 ++++++++++++++++--- 1 file changed, 34 insertions(+), 5 deletions(-) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index e07b49c00..165f6c45e 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -44,7 +44,7 @@ public class RuleParserTest { final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", x, y); final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); - final PositiveLiteral atom5 = Expressions.makePositiveLiteral("http://example.org/s", c); + final PositiveLiteral fact = Expressions.makePositiveLiteral("http://example.org/s", c); final Conjunction body = Expressions.makeConjunction(atom1, atom2); final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); final Rule rule = Expressions.makeRule(head, body); @@ -54,7 +54,7 @@ public void testExplicitIri() throws ParsingException { String input = "() ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); + assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } @Test @@ -62,7 +62,7 @@ public void testPrefixResolution() throws ParsingException { String input = "@prefix ex: . ex:s(ex:c) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); + assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } @Test @@ -70,7 +70,7 @@ public void testBaseRelativeResolution() throws ParsingException { String input = "@base . () ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); + assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } @Test @@ -78,7 +78,7 @@ public void testBaseResolution() throws ParsingException { String input = "@base . s(c) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - assertEquals(Arrays.asList(atom5), ruleParser.getFacts()); + assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } @Test @@ -90,4 +90,33 @@ public void testNoBaseRelativeIri() throws ParsingException { assertEquals(Arrays.asList(atom), ruleParser.getFacts()); } + @Test(expected = ParsingException.class) + public void testPrefixConflict() throws ParsingException { + String input = "@prefix ex: . @prefix ex: . 
s(c) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void testBaseConflict() throws ParsingException { + String input = "@base . @base . s(c) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void testMissingPrefix() throws ParsingException { + String input = "ex:s(c) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + + @Test + public void testSimpleRule() throws ParsingException { + String input = "@base . " + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . "; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(rule), ruleParser.getRules()); + } + } From 7d3ea43d7dacd653481a53eb602f2d30d2ee44f3 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 19:06:06 +0200 Subject: [PATCH 0046/1003] test whitespace tolerance --- .../vlog4j/syntax/parser/RuleParserTest.java | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 165f6c45e..096ac7c7a 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -103,14 +103,14 @@ public void testBaseConflict() throws ParsingException { RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); } - + @Test(expected = ParsingException.class) public void testMissingPrefix() throws ParsingException { String input = "ex:s(c) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); } - + @Test public void testSimpleRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . "; @@ -118,5 +118,14 @@ public void testSimpleRule() throws ParsingException { ruleParser.parse(input); assertEquals(Arrays.asList(rule), ruleParser.getRules()); } - + + @Test + public void testWhiteSpace() throws ParsingException { + String input = "@base \n\n . " + + " q(?X, !Y) , r(?X, d\t ) \n\n:- p(?X,c), p(?X,\n?Z) \n. 
"; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(rule), ruleParser.getRules()); + } + } From 45324e39b5272d5e7eb25b772a3641a23d5a2c53 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 20:18:44 +0200 Subject: [PATCH 0047/1003] Further parser tests, some failing --- .../vlog4j/syntax/parser/RuleParserTest.java | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 096ac7c7a..7f2d79230 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -40,11 +40,13 @@ public class RuleParserTest { final Variable z = Expressions.makeVariable("Z"); final Constant c = Expressions.makeConstant("http://example.org/c"); final Constant d = Expressions.makeConstant("http://example.org/d"); + final Constant abc = Expressions.makeConstant("\"abc\"^^"); final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", x, y); final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); final PositiveLiteral fact = Expressions.makePositiveLiteral("http://example.org/s", c); + final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); final Conjunction body = Expressions.makeConjunction(atom1, atom2); final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); final Rule rule = Expressions.makeRule(head, body); @@ -128,4 +130,35 @@ public void testWhiteSpace() throws ParsingException { assertEquals(Arrays.asList(rule), ruleParser.getRules()); } + @Test(expected = ParsingException.class) + public void testNoUnsafeVariables() throws ParsingException { + String input = "p(?X,?Y) :- q(?X) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + + @Test + public void testStringLiteral() throws ParsingException { + String input = "p(\"abc\") ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); + } + + @Test + public void testFullLiteral() throws ParsingException { + String input = "p(\"abc\"^^) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); + } + + @Test + public void testPrefixedLiteral() throws ParsingException { + String input = "@prefix xsd: . 
" + "p(\"abc\"^^xsd:string) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); + } + } From 79889be4dd8bf7435569f5f21eb88e6b4b9d718c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 20:31:15 +0200 Subject: [PATCH 0048/1003] test negation and comments --- .../vlog4j/syntax/parser/RuleParserTest.java | 62 ++++++++++++++----- 1 file changed, 45 insertions(+), 17 deletions(-) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 7f2d79230..f05c9e0a6 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -35,21 +35,24 @@ public class RuleParserTest { - final Variable x = Expressions.makeVariable("X"); - final Variable y = Expressions.makeVariable("Y"); - final Variable z = Expressions.makeVariable("Z"); - final Constant c = Expressions.makeConstant("http://example.org/c"); - final Constant d = Expressions.makeConstant("http://example.org/d"); - final Constant abc = Expressions.makeConstant("\"abc\"^^"); - final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); - final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); - final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", x, y); - final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); - final PositiveLiteral fact = Expressions.makePositiveLiteral("http://example.org/s", c); - final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); - final Conjunction body = Expressions.makeConjunction(atom1, atom2); - final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); - final Rule rule = Expressions.makeRule(head, body); + private final Variable x = Expressions.makeVariable("X"); + private final Variable y = Expressions.makeVariable("Y"); + private final Variable z = Expressions.makeVariable("Z"); + private final Constant c = Expressions.makeConstant("http://example.org/c"); + private final Constant d = Expressions.makeConstant("http://example.org/d"); + private final Constant abc = Expressions.makeConstant("\"abc\"^^"); + private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); + private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", x, c); + private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); + private final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", x, y); + private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); + private final PositiveLiteral fact = Expressions.makePositiveLiteral("http://example.org/s", c); + private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); + private final Conjunction body1 = Expressions.makeConjunction(atom1, atom2); + private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); + private final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); + private final Rule rule1 = Expressions.makeRule(head, body1); + private final Rule rule2 = Expressions.makeRule(head, body2); @Test public void testExplicitIri() throws ParsingException { @@ -118,7 
+121,22 @@ public void testSimpleRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . "; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - assertEquals(Arrays.asList(rule), ruleParser.getRules()); + assertEquals(Arrays.asList(rule1), ruleParser.getRules()); + } + + @Test + public void testNegationRule() throws ParsingException { + String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(rule2), ruleParser.getRules()); + } + + @Test(expected = ParsingException.class) + public void testUnsafeNegationRule() throws ParsingException { + String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?Y,c), p(?X,?Z) . "; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); } @Test @@ -127,7 +145,7 @@ public void testWhiteSpace() throws ParsingException { + " q(?X, !Y) , r(?X, d\t ) \n\n:- p(?X,c), p(?X,\n?Z) \n. "; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - assertEquals(Arrays.asList(rule), ruleParser.getRules()); + assertEquals(Arrays.asList(rule1), ruleParser.getRules()); } @Test(expected = ParsingException.class) @@ -160,5 +178,15 @@ public void testPrefixedLiteral() throws ParsingException { ruleParser.parse(input); assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); } + + @Test + public void testLineComments() throws ParsingException { + String input = "@prefix ex: . % comment \n" + + "%@prefix ex: \n" + + " ex:s(ex:c) . % comment \n"; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + } } From 89eac75a6f375fde92aa4aa030b5abc6cc8933e0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 14 Aug 2019 20:47:20 +0200 Subject: [PATCH 0049/1003] acknowledge Jena code --- .../semanticweb/vlog4j/syntax/parser/RuleParserBase.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java index 70a10fca6..140cefd5b 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java @@ -34,7 +34,15 @@ /** * Basic methods used in the JavaCC-generated parser. * + * Implementation of some string escaping methods adapted from Apache Jena, + * released under Apache 2.0 license terms. 
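
For illustration only (not part of the patch): the escape behaviour these Jena-derived helpers are meant to provide, mirroring the testStringLiteralEscapes case added later in this series. The sketch class name is made up, and the exact lexical form of the resulting constant is left to the parser.

```
import org.semanticweb.vlog4j.syntax.parser.ParsingException;
import org.semanticweb.vlog4j.syntax.parser.RuleParser;

public class EscapeHandlingSketch {
    public static void main(String[] args) throws ParsingException {
        // Source text as it would appear in a rules file: p("_\"_\\_\n_\t_") .
        final String input = "p(\"_\\\"_\\\\_\\n_\\t_\") .";
        final RuleParser ruleParser = new RuleParser();
        ruleParser.parse(input);
        // \" unescapes to a quote, \\ to a backslash, \n to a newline, \t to a tab:
        System.out.println(ruleParser.getFacts().get(0));
    }
}
```
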
+ * + * @see https://github.com/apache/jena/blob/master/jena-core/src/main/java/org/apache/jena/n3/turtle/ParserBase.java + * * @author Markus Kroetzsch + * @author Larry Gonzalez + * @author Jena developers, Apache Software Foundation (ASF) * */ public class RuleParserBase { From cf9a6e9be933554ee017d976b055ce7f245ccd44 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 14 Aug 2019 22:32:37 +0200 Subject: [PATCH 0050/1003] add angle brackets to IRIS in datatypes --- .../implementation/javacc/JavaCCRuleParser.jj | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index f1485aa0c..fb9a16fe5 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -212,7 +212,7 @@ String predicateName() throws PrefixDeclarationException: Token t; } { - s = IRI() { return s; } + s = IRI(false) { return s; } | t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } } @@ -226,7 +226,7 @@ Term term(int itComesFrom) throws PrefixDeclarationException: Token t; } { - s = IRI() { return makeConstant(s); } + s = IRI(false) { return makeConstant(s); } | s = RDFLiteral() { return makeConstant(s); } | t = < UNIVAR > { @@ -268,7 +268,7 @@ String RDFLiteral() throws PrefixDeclarationException: String dt = null; } { - lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI() )? + lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI(true) )? { return strRDFLiteral(lex, lang, dt); } } @@ -313,7 +313,7 @@ String String(): } } -String IRI() throws PrefixDeclarationException: +String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: { String iri; } @@ -322,7 +322,10 @@ String IRI() throws PrefixDeclarationException: iri = IRIREF() | iri = PrefixedName() ) - { return prefixDeclarations.absolutize(iri); + { String result = prefixDeclarations.absolutize(iri); + if (includeAngleBrackets) + result = "<"+result+">"; + return result; //return "<"+prefixDeclarations.absolutize(iri)+">"; } } From 2b65b72cab16e0905713ca225939152a4b0911b5 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 14 Aug 2019 23:21:05 +0200 Subject: [PATCH 0051/1003] remove import static makePredicate --- .../semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java index be15d11e4..36214382a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java @@ -19,7 +19,6 @@ * limitations under the License. 
* #L% */ -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; import java.io.File; import java.io.FileInputStream; @@ -65,7 +64,7 @@ public static void main(final String[] args) reasoner.setLogLevel(LogLevel.DEBUG); /* Configure RDF data source */ - final Predicate doidTriplePredicate = makePredicate("doidTriple", 3); + final Predicate doidTriplePredicate = Expressions.makePredicate("doidTriple", 3); final DataSource doidDataSource = new RdfFileDataSource( new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); reasoner.addFactsFromDataSource(doidTriplePredicate, doidDataSource); From d382f6013feda029c0c1460d7b68e6396abee521 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 09:57:13 +0200 Subject: [PATCH 0052/1003] More tests --- .../vlog4j/syntax/parser/RuleParserTest.java | 38 ++++++++++++++++--- 1 file changed, 33 insertions(+), 5 deletions(-) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index f05c9e0a6..3b189dd4b 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -123,7 +123,7 @@ public void testSimpleRule() throws ParsingException { ruleParser.parse(input); assertEquals(Arrays.asList(rule1), ruleParser.getRules()); } - + @Test public void testNegationRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; @@ -131,7 +131,7 @@ public void testNegationRule() throws ParsingException { ruleParser.parse(input); assertEquals(Arrays.asList(rule2), ruleParser.getRules()); } - + @Test(expected = ParsingException.class) public void testUnsafeNegationRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?Y,c), p(?X,?Z) . 
"; @@ -155,6 +155,13 @@ public void testNoUnsafeVariables() throws ParsingException { ruleParser.parse(input); } + @Test(expected = ParsingException.class) + public void testNoDollarVariables() throws ParsingException { + String input = "p($X) :- q($X) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + @Test public void testStringLiteral() throws ParsingException { String input = "p(\"abc\") ."; @@ -163,6 +170,28 @@ public void testStringLiteral() throws ParsingException { assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); } + @Test + public void testStringLiteralEscapes() throws ParsingException { + String input = "p(\"_\\\"_\\\\_\\n_\\t_\") ."; // User input: p("_\"_\\_\n_\t_") + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("\"_\"_\\_\n_\t_\"^^")); + assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + } + + @Test + public void testStringLiteralMultiLine() throws ParsingException { + String input = "p('''line 1\n\n" + + "line 2\n" + + "line 3''') ."; // User input: p("a\"b\\c") + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("\"line 1\n\nline 2\nline 3\"^^")); + assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + } + @Test public void testFullLiteral() throws ParsingException { String input = "p(\"abc\"^^) ."; @@ -178,11 +207,10 @@ public void testPrefixedLiteral() throws ParsingException { ruleParser.parse(input); assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); } - + @Test public void testLineComments() throws ParsingException { - String input = "@prefix ex: . % comment \n" - + "%@prefix ex: \n" + String input = "@prefix ex: . % comment \n" + "%@prefix ex: \n" + " ex:s(ex:c) . 
% comment \n"; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); From 61ac0dd201bb2e9a034aea51708d0006f469f8ea Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 09:57:32 +0200 Subject: [PATCH 0053/1003] small renaming --- .../vlog4j/syntax/parser/RuleParserBase.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java index 140cefd5b..d51063e58 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java @@ -47,9 +47,9 @@ */ public class RuleParserBase { final protected PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); - final protected List listOfRules = new ArrayList<>(); - final protected List listOfFacts = new ArrayList<>(); - final protected List listOfQueries = new ArrayList<>(); + final protected List rules = new ArrayList<>(); + final protected List facts = new ArrayList<>(); + final protected List queries = new ArrayList<>(); protected Constant createLiteralInteger(String lexicalForm) { // this method should be eliminated @@ -353,15 +353,15 @@ protected static String unescapePName(String s, int line, int column) throws Par } public List getRules() { - return listOfRules; + return rules; } public List getFacts() { - return listOfFacts; + return facts; } public List getQueries() { - return listOfQueries; + return queries; } } From 16f1c48e502b3fafcec5419e7db012b82cf47915 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 10:33:36 +0200 Subject: [PATCH 0054/1003] Fix renaming in javacc; simplify code --- .../implementation/javacc/JavaCCRuleParser.jj | 36 +++++++------------ .../vlog4j/syntax/parser/RuleParserTest.java | 7 ++++ 2 files changed, 20 insertions(+), 23 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index fb9a16fe5..ddd39a6ba 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -91,13 +91,13 @@ void statement() throws PrefixDeclarationException: PositiveLiteral l; } { - LOOKAHEAD(rule()) r = rule() { listOfRules.add(r);} + LOOKAHEAD(rule()) r = rule() { rules.add(r);} | l = positiveLiteral(0) < DOT > //not from a rule { if (l.getVariables().isEmpty()) - listOfFacts.add(l); + facts.add(l); else - listOfQueries.add(l); + queries.add(l); } } @@ -109,35 +109,25 @@ Rule rule() throws PrefixDeclarationException: bodyVars = new HashSet(); headExiVars = new HashSet(); headUniVars = new HashSet(); - Set headExiVarsInterBodyVars; - Set headExiVarsInterheadUniVars; - Set headUniVarsInterBodyVars; } { // 1 rule's head // 2 rule's body head = listOfPositiveLiterals(1) < ARROW > body = listOfLiterals(2) < DOT > { - rule = makeRule(makePositiveConjunction(head), makeConjunction(body)); + // check that the intersection between headExiVars and BodyVars is empty - headExiVarsInterBodyVars = new HashSet(headExiVars); - headExiVarsInterBodyVars.retainAll(bodyVars); - if (!headExiVarsInterBodyVars.isEmpty()) { - throw new ParseException("\nMalformed Rule:\nSome 
existential variables appers in the body.\nRule: " + rule.toString()); - } - // check that the intersection between headExiVars and headUniVars is empty - headExiVarsInterheadUniVars = new HashSet(headExiVars); - headExiVarsInterheadUniVars.retainAll(headUniVars); - if (!headExiVarsInterBodyVars.isEmpty()) { - throw new ParseException("\nMalformed Rule:\nSome existential variables appers as universal variables in the head.\nRule: " + rule.toString()); - } + for (String variable : headExiVars) { + if (bodyVars.contains(variable)) + throw new ParseException("Malformed rule " + rule.toString() + "\nExistential variable " + variable + " also used in rule body."); + } + // check that bodyVars contains headUniVars - headUniVarsInterBodyVars = new HashSet(headUniVars); - headUniVarsInterBodyVars.retainAll(bodyVars); - if (!headUniVarsInterBodyVars.equals(headUniVars)) { - throw new ParseException("\nMalformed Rule:\nSome universal variables appearing in the head does not apper in the body.\nRule: " + rule.toString()); - } + for (String variable : headUniVars) { + if (!bodyVars.contains(variable)) + throw new ParseException("Unsafe rule " + rule.toString() + "\nUniversal variable " + variable + " occurs in head but not in body."); + } return rule; } diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 3b189dd4b..8edfa483e 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -154,6 +154,13 @@ public void testNoUnsafeVariables() throws ParsingException { RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); } + + @Test(expected = ParsingException.class) + public void testNoConflictingQuantificationVariables() throws ParsingException { + String input = "p(?X,!X) :- q(?X) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } @Test(expected = ParsingException.class) public void testNoDollarVariables() throws ParsingException { From d605756536121222822b6f86e8e0291e36ea0fd6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 10:40:03 +0200 Subject: [PATCH 0055/1003] Test lang strings --- .../vlog4j/syntax/parser/RuleParserTest.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 8edfa483e..6e1117063 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -214,6 +214,16 @@ public void testPrefixedLiteral() throws ParsingException { ruleParser.parse(input); assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); } + + @Test + public void testLangStringLiteral() throws ParsingException { + String input = "p(\"abc\"@en-gb) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("\"abc\"@en-gb")); + assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + } @Test public void testLineComments() throws ParsingException { From 83d57eee7dacbee0125bb3a8ec645630a883a6cb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 10:47:22 +0200 Subject: 
[PATCH 0056/1003] Use constants for XSD IRIs --- .../syntax/common/PrefixDeclarations.java | 7 ++++++ .../vlog4j/syntax/parser/RuleParserTest.java | 24 +++++++++---------- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java index c4ecd8335..05bb21d3f 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java @@ -28,6 +28,13 @@ */ public interface PrefixDeclarations { + static final String XSD = "http://www.w3.org/2001/XMLSchema#"; + static final String XSD_STRING = "http://www.w3.org/2001/XMLSchema#string"; + static final String XSD_DECIMAL = "http://www.w3.org/2001/XMLSchema#decimal"; + static final String XSD_FLOAT = "http://www.w3.org/2001/XMLSchema#float"; + static final String XSD_INTEGER = "http://www.w3.org/2001/XMLSchema#integer"; + static final String XSD_BOOLEAN = "http://www.w3.org/2001/XMLSchema#boolean"; + /** * Returns the relevant base namespace. This should always return a result, * possibly using a local default value if no base was declared. diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 6e1117063..7538a309b 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -32,6 +32,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.syntax.common.PrefixDeclarations; public class RuleParserTest { @@ -40,7 +41,7 @@ public class RuleParserTest { private final Variable z = Expressions.makeVariable("Z"); private final Constant c = Expressions.makeConstant("http://example.org/c"); private final Constant d = Expressions.makeConstant("http://example.org/d"); - private final Constant abc = Expressions.makeConstant("\"abc\"^^"); + private final Constant abc = Expressions.makeConstant("\"abc\"^^<" + PrefixDeclarations.XSD_STRING + ">"); private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", x, c); private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); @@ -154,7 +155,7 @@ public void testNoUnsafeVariables() throws ParsingException { RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); } - + @Test(expected = ParsingException.class) public void testNoConflictingQuantificationVariables() throws ParsingException { String input = "p(?X,!X) :- q(?X) ."; @@ -179,26 +180,24 @@ public void testStringLiteral() throws ParsingException { @Test public void testStringLiteralEscapes() throws ParsingException { - String input = "p(\"_\\\"_\\\\_\\n_\\t_\") ."; // User input: p("_\"_\\_\n_\t_") + String input = "p(\"_\\\"_\\\\_\\n_\\t_\") ."; // User input: p("_\"_\\_\n_\t_") RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeConstant("\"_\"_\\_\n_\t_\"^^")); + 
Expressions.makeConstant("\"_\"_\\_\n_\t_\"^^<" + PrefixDeclarations.XSD_STRING + ">")); assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } @Test public void testStringLiteralMultiLine() throws ParsingException { - String input = "p('''line 1\n\n" - + "line 2\n" - + "line 3''') ."; // User input: p("a\"b\\c") + String input = "p('''line 1\n\n" + "line 2\n" + "line 3''') ."; // User input: p("a\"b\\c") RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeConstant("\"line 1\n\nline 2\nline 3\"^^")); + Expressions.makeConstant("\"line 1\n\nline 2\nline 3\"^^<" + PrefixDeclarations.XSD_STRING + ">")); assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } - + @Test public void testFullLiteral() throws ParsingException { String input = "p(\"abc\"^^) ."; @@ -209,19 +208,18 @@ public void testFullLiteral() throws ParsingException { @Test public void testPrefixedLiteral() throws ParsingException { - String input = "@prefix xsd: . " + "p(\"abc\"^^xsd:string) ."; + String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . " + "p(\"abc\"^^xsd:string) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); } - + @Test public void testLangStringLiteral() throws ParsingException { String input = "p(\"abc\"@en-gb) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeConstant("\"abc\"@en-gb")); + PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"abc\"@en-gb")); assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } From 04b99246dba38711720cb0786ad0c355ec5bd3bc Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 11:08:11 +0200 Subject: [PATCH 0057/1003] Update example rules file --- vlog4j-examples/src/main/data/input/doid.rls | 34 +++++++++++++++++++ .../data/input/localSyntax/doid-example.txt | 26 -------------- .../examples/DoidExampleLocalSyntax.java | 2 +- 3 files changed, 35 insertions(+), 27 deletions(-) create mode 100644 vlog4j-examples/src/main/data/input/doid.rls delete mode 100644 vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt diff --git a/vlog4j-examples/src/main/data/input/doid.rls b/vlog4j-examples/src/main/data/input/doid.rls new file mode 100644 index 000000000..2997f31d4 --- /dev/null +++ b/vlog4j-examples/src/main/data/input/doid.rls @@ -0,0 +1,34 @@ +@prefix rdfs: . + +%%%%% Data sources used in this example (with input predicate names): +% doidTriple(S,P,O) -- triples from DOID ontology (loaded from RDF) +% recentDeaths(human) -- human who died recently (Wikidata IRI) +% recentDeathsCause(human, deathCause) -- cause of death of a human (both Wikidata IRIs) +% diseaseId(diseaseIri, doid) -- disease from Wikidata (IRI) with DOID (string identifier) + +% Combine recent death data (infer "unknown" cause if no cause given): +deathCause(?X, ?Z) :- recentDeathsCause(?X, ?Z) . +deathCause(?X, !Z) :- recentDeaths(?X) . + +% Mark Wikidata diseases that have a DOID: +hasDoid(?X) :- diseaseId(?X, ?DoidId) . + +% Relate DOID string ID (used on Wikidata) to DOID IRI (used in DOID ontology) +doid(?Iri, ?DoidId) :- doidTriple(?Iri, ,?DoidId) . + +% Compute transitive closure of DOID subclass hierarchy +diseaseHierarchy(?X, ?Y) :- doidTriple(?X, rdfs:subClassOf, ?Y) . 
+diseaseHierarchy(?X, ?Z) :- diseaseHierarchy(?X, ?Y), doidTriple(?Y, rdfs:subClassOf, ?Z) . + +% Find DOID ids for all subclasses of cancer: +cancerDisease(?Xdoid) :- diseaseHierarchy(?X, ?Y), doid(?Y, "DOID:162"), doid(?X, ?Xdoid) . + +% Compute who died of cancer and who died of something else (including diseases unknown to DOID): +humansWhoDiedOfCancer(?X) :- deathCause(?X, ?Y), diseaseId(?Y, ?Z), cancerDisease(?Z) . +humansWhoDiedOfNoncancer(?X) :- deathCause(?X, ?Y), diseaseId(?Y, ?Z), ~cancerDisease(?Z) . +humansWhoDiedOfNoncancer(?X) :- deathCause(?X, ?Y), ~hasDoid(?Y) . + +% Queries to be used in example application: +humansWhoDiedOfCancer(?X) . +humansWhoDiedOfNoncancer(?X) . + diff --git a/vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt b/vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt deleted file mode 100644 index 0d85f3b64..000000000 --- a/vlog4j-examples/src/main/data/input/localSyntax/doid-example.txt +++ /dev/null @@ -1,26 +0,0 @@ -@base . -@prefix xsd: . - -%this is a comment - -deathCause(?X, ?Z) :- recentDeathsCause(?X, ?Z) . -deathCause(?X, !Z) :- recentDeaths(?X) . - -doid(?Iri,?DoidId) :- doidTriple(?Iri,,?DoidId) . -hasDoid(?X) :- diseaseId(?X,?DoidId) . - -diseaseHierarchy(?X,?Y) :- doidTriple(?X,,?Y) . -diseaseHierarchy(?X,?Z) :- diseaseHierarchy(?X,?Y), doidTriple(?Y,,?Z) . - -%the next three rules are the same -%cancerDisease(?Xdoid) :- diseaseHierarchy(?X,?Y), doid(?Y,"DOID:162"^^), doid(?X, ?Xdoid) . -%cancerDisease(?Xdoid) :- diseaseHierarchy(?X,?Y), doid(?Y,"DOID:162"^^xsd:string), doid(?X, ?Xdoid) . -cancerDisease(?Xdoid) :- diseaseHierarchy(?X,?Y), doid(?Y,"DOID:162"), doid(?X, ?Xdoid) . - -humansWhoDiedOfCancer(?X) :- deathCause(?X,?Y), diseaseId(?Y,?Z), cancerDisease(?Z) . -humansWhoDiedOfNoncancer(?X) :- deathCause(?X,?Y), diseaseId(?Y,?Z), ~cancerDisease(?Z) . -humansWhoDiedOfNoncancer(?X) :- deathCause(?X,?y), ~hasDoid(?y) . - -humansWhoDiedOfCancer(?X) . -humansWhoDiedOfNoncancer(?X) . 
- diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java index 36214382a..fda7df11f 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java @@ -93,7 +93,7 @@ public static void main(final String[] args) RuleParser rp = new RuleParser(); try { - rp.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/localSyntax/doid-example.txt")); + rp.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/doid.rls")); } catch (ParsingException e) { System.out.println("Failed to parse rules: " + e.getMessage()); return; From eedcfaa25f4609f792ec82b52b25925aa67faf0b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 11:15:51 +0200 Subject: [PATCH 0058/1003] Updated example organisation --- .../vlog4j/examples/DoidExample.java | 69 ++++++--------- .../DoidExampleGraal.java} | 86 ++++++++++++------- 2 files changed, 79 insertions(+), 76 deletions(-) rename vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/{DoidExampleLocalSyntax.java => graal/DoidExampleGraal.java} (54%) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 0fbfaa20b..acccb3b38 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -20,19 +20,16 @@ * #L% */ -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; - import java.io.File; +import java.io.FileInputStream; import java.io.IOException; import java.net.URL; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -40,9 +37,8 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; - -import fr.lirmm.graphik.graal.io.dlp.DlgpParser; +import org.semanticweb.vlog4j.syntax.parser.ParsingException; +import org.semanticweb.vlog4j.syntax.parser.RuleParser; /** * This example reasons about human diseases, based on information from the @@ -64,9 +60,11 @@ public static void main(final String[] args) final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); try (final Reasoner reasoner = Reasoner.getInstance()) { + reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); + reasoner.setLogLevel(LogLevel.DEBUG); /* Configure RDF data source */ - final 
Predicate doidTriplePredicate = makePredicate("doidTriple", 3); + final Predicate doidTriplePredicate = Expressions.makePredicate("doidTriple", 3); final DataSource doidDataSource = new RdfFileDataSource( new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); reasoner.addFactsFromDataSource(doidTriplePredicate, doidDataSource); @@ -93,34 +91,15 @@ public static void main(final String[] args) final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); reasoner.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); - /* Load rules from DLGP file */ - try (final DlgpParser parser = new DlgpParser( - new File(ExamplesUtils.INPUT_FOLDER + "/graal", "doid-example.dlgp"))) { - while (parser.hasNext()) { - final Object object = parser.next(); - if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { - reasoner.addRules( - GraalToVLog4JModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); - } - } + RuleParser ruleParser = new RuleParser(); + try { + ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/doid.rls")); + } catch (ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; } - /* Create additional rules with negated literals */ - final Variable x = makeVariable("X"); - final Variable y = makeVariable("Y"); - final Variable z = makeVariable("Z"); - // humansWhoDiedOfNoncancer(X):-deathCause(X,Y),diseaseId(Y,Z),~cancerDisease(Z) - final NegativeLiteral notCancerDisease = Expressions.makeNegativeLiteral("cancerDisease", z); - final PositiveLiteral diseaseId = Expressions.makePositiveLiteral("diseaseId", y, z); - final PositiveLiteral deathCause = Expressions.makePositiveLiteral("deathCause", x, y); - final PositiveLiteral humansWhoDiedOfNoncancer = Expressions.makePositiveLiteral("humansWhoDiedOfNoncancer", - x); - reasoner.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), - Expressions.makeConjunction(deathCause, diseaseId, notCancerDisease))); - // humansWhoDiedOfNoncancer(X) :- deathCause(X,Y), ~hasDoid(Y) - final NegativeLiteral hasNotDoid = Expressions.makeNegativeLiteral("hasDoid", y); - reasoner.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), - Expressions.makeConjunction(deathCause, hasNotDoid))); + reasoner.addRules(ruleParser.getRules()); System.out.println("Rules configured:\n--"); reasoner.getRules().forEach(System.out::println); @@ -129,17 +108,17 @@ public static void main(final String[] args) System.out.println("Loading completed."); System.out.println("Starting reasoning (including SPARQL query answering) ..."); reasoner.reason(); - System.out.println("... reasoning completed."); - - final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); - final QueryResultIterator answersCancer = reasoner.answerQuery(humansWhoDiedOfCancer, true); - System.out.println( - "Humans in Wikidata who died in 2018 due to cancer: " + ExamplesUtils.iteratorSize(answersCancer)); - - final QueryResultIterator answersNoncancer = reasoner.answerQuery(humansWhoDiedOfNoncancer, true); - System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " - + ExamplesUtils.iteratorSize(answersNoncancer)); + System.out.println("... 
reasoning completed.\n--"); + + System.out.println("Number of results in queries:"); + QueryResultIterator answers; + for (PositiveLiteral l : ruleParser.getQueries()) { + answers = reasoner.answerQuery(l, true); + System.out.print(l.toString()); + System.out.println(": " + ExamplesUtils.iteratorSize(answers)); + } System.out.println("Done."); + } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java similarity index 54% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java rename to vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index fda7df11f..7918490f7 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExampleLocalSyntax.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.vlog4j.examples.graal; /*- * #%L @@ -20,16 +20,19 @@ * #L% */ +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; + import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.net.URL; +import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; @@ -37,20 +40,24 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.vlog4j.syntax.parser.ParsingException; -import org.semanticweb.vlog4j.syntax.parser.RuleParser; +import org.semanticweb.vlog4j.examples.DoidExample; +import org.semanticweb.vlog4j.examples.ExamplesUtils; +import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; + +import fr.lirmm.graphik.graal.io.dlp.DlgpParser; /** - * This example reasons about human diseases, based on information from the - * Disease Ontology (DOID) and Wikidata. It illustrates how to load data from - * different sources (RDF file, SPARQL), and reason about these inputs using - * rules that are loaded from a file. The rules used here employ existential - * quantifiers and stratified negation. + * This example is a variant of {@link DoidExample} using Graal. It reasons + * about human diseases, based on information from the Disease Ontology (DOID) + * and Wikidata. It illustrates how to load data from different sources (RDF + * file, SPARQL), and reason about these inputs using rules that are loaded from + * a filein DLGP syntax. Since DLGP doesnot support negation, an additional rule + * with stratified negation is added through custom Java code. 
* * @author Markus Kroetzsch * @author Larry Gonzalez */ -public class DoidExampleLocalSyntax { +public class DoidExampleGraal { public static void main(final String[] args) throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { @@ -60,11 +67,9 @@ public static void main(final String[] args) final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); - reasoner.setLogLevel(LogLevel.DEBUG); /* Configure RDF data source */ - final Predicate doidTriplePredicate = Expressions.makePredicate("doidTriple", 3); + final Predicate doidTriplePredicate = makePredicate("doidTriple", 3); final DataSource doidDataSource = new RdfFileDataSource( new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); reasoner.addFactsFromDataSource(doidTriplePredicate, doidDataSource); @@ -91,15 +96,34 @@ public static void main(final String[] args) final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); reasoner.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); - RuleParser rp = new RuleParser(); - try { - rp.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/doid.rls")); - } catch (ParsingException e) { - System.out.println("Failed to parse rules: " + e.getMessage()); - return; + /* Load rules from DLGP file */ + try (final DlgpParser parser = new DlgpParser( + new File(ExamplesUtils.INPUT_FOLDER + "/graal", "doid-example.dlgp"))) { + while (parser.hasNext()) { + final Object object = parser.next(); + if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { + reasoner.addRules( + GraalToVLog4JModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); + } + } } - reasoner.addRules(rp.getRules()); + /* Create additional rules with negated literals */ + final Variable x = makeVariable("X"); + final Variable y = makeVariable("Y"); + final Variable z = makeVariable("Z"); + // humansWhoDiedOfNoncancer(X):-deathCause(X,Y),diseaseId(Y,Z),~cancerDisease(Z) + final NegativeLiteral notCancerDisease = Expressions.makeNegativeLiteral("cancerDisease", z); + final PositiveLiteral diseaseId = Expressions.makePositiveLiteral("diseaseId", y, z); + final PositiveLiteral deathCause = Expressions.makePositiveLiteral("deathCause", x, y); + final PositiveLiteral humansWhoDiedOfNoncancer = Expressions.makePositiveLiteral("humansWhoDiedOfNoncancer", + x); + reasoner.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), + Expressions.makeConjunction(deathCause, diseaseId, notCancerDisease))); + // humansWhoDiedOfNoncancer(X) :- deathCause(X,Y), ~hasDoid(Y) + final NegativeLiteral hasNotDoid = Expressions.makeNegativeLiteral("hasDoid", y); + reasoner.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), + Expressions.makeConjunction(deathCause, hasNotDoid))); System.out.println("Rules configured:\n--"); reasoner.getRules().forEach(System.out::println); @@ -108,17 +132,17 @@ public static void main(final String[] args) System.out.println("Loading completed."); System.out.println("Starting reasoning (including SPARQL query answering) ..."); reasoner.reason(); - System.out.println("... 
reasoning completed.\n--"); - - System.out.println("Number of results in queries:"); - QueryResultIterator answers; - for (PositiveLiteral l : rp.getQueries()) { - answers = reasoner.answerQuery(l, true); - System.out.print(l.toString()); - System.out.println(": " + ExamplesUtils.iteratorSize(answers)); - } - System.out.println("Done."); + System.out.println("... reasoning completed."); + + final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); + final QueryResultIterator answersCancer = reasoner.answerQuery(humansWhoDiedOfCancer, true); + System.out.println( + "Humans in Wikidata who died in 2018 due to cancer: " + ExamplesUtils.iteratorSize(answersCancer)); + final QueryResultIterator answersNoncancer = reasoner.answerQuery(humansWhoDiedOfNoncancer, true); + System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " + + ExamplesUtils.iteratorSize(answersNoncancer)); + System.out.println("Done."); } } From 4071177b255251cc417cb01e35a079c8292df583 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 11:16:14 +0200 Subject: [PATCH 0059/1003] typos --- .../semanticweb/vlog4j/examples/graal/DoidExampleGraal.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 7918490f7..30d13cc47 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -51,8 +51,8 @@ * about human diseases, based on information from the Disease Ontology (DOID) * and Wikidata. It illustrates how to load data from different sources (RDF * file, SPARQL), and reason about these inputs using rules that are loaded from - * a filein DLGP syntax. Since DLGP doesnot support negation, an additional rule - * with stratified negation is added through custom Java code. + * a file in DLGP syntax. Since DLGP does not support negation, an additional + * rule with stratified negation is added through custom Java code. * * @author Markus Kroetzsch * @author Larry Gonzalez From 0b23882a3f14331ac4dfbc07740deb0f38d96b45 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 11:33:44 +0200 Subject: [PATCH 0060/1003] fixed rule --- .../org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java index d34dd2170..b547756a6 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java @@ -44,7 +44,7 @@ public static void main(final String[] args) rules += "
    () . \n"; rules += "(?x) :-
    (?x) . \n"; rules += "(?y) . \n"; - rules += "(?x,?y) :- (?x) . \n"; + rules += "(?x,!y) :- (?x) . \n"; rules += "(?x,?y) . \n"; RuleParser rp = new RuleParser(); From 0cd04ce0df220d0cb5fe9ca6f8e18e8a31d4b4f0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 11:35:28 +0200 Subject: [PATCH 0061/1003] Rely on snapshot version of vlog --- vlog4j-core/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index d2d7d3a8c..9cc02a677 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -16,7 +16,7 @@ Core components of VLog4j: reasoner and model - 1.2.1 + 1.2.2-SNAPSHOT From 7e78c7ce0d0a42717ba1a578f168e26dd32a9e7f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Thu, 15 Aug 2019 11:39:37 +0200 Subject: [PATCH 0062/1003] Use snapshot version of vlog for CI --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 42f067d5b..e531b674c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,7 @@ before_install: - sudo apt-get install gcc-5 -y # - eval “CC=gcc-5 && CXX=g++-5” ## Uncomment line below to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -# - sh ./build-vlog-library.sh + - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 61c2debf37f03235cfd72127550ef0d8f6856c7c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 12:22:01 +0200 Subject: [PATCH 0063/1003] Use different constant ID conversion to match new parser --- .../implementation/TermToVLogConverter.java | 8 +++++++- .../implementation/VLogToModelConverter.java | 17 +++++++++-------- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java index 10de85244..509b5978d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java @@ -40,7 +40,13 @@ class TermToVLogConverter implements TermVisitor { */ @Override public karmaresearch.vlog.Term visit(Constant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + if (term.getName().startsWith("\"")) { // keep datatype literal strings unchanged + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + } else if (term.getName().contains(":")) { // enclose IRIs with < > + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "<" + term.getName() + ">"); + } else { // keep relative IRIs unchanged + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + } } /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java index 113f8a2b8..7d038a906 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java @@ -43,8 +43,7 @@ class VLogToModelConverter { * Converts internal VLog query results (represented as arrays of * 
{@link karmaresearch.vlog.Term}s) into VLog model API QueryResults. * - * @param vLogQueryResult - * an array of terms that represent an answer to a query. + * @param vLogQueryResult an array of terms that represent an answer to a query. * @return a QueryResult containing the corresponding {@code vLogQueryResult} as * a List of {@link Term}s. */ @@ -56,9 +55,8 @@ static QueryResult toQueryResult(karmaresearch.vlog.Term[] vLogQueryResult) { * Converts an array of internal VLog terms ({@link karmaresearch.vlog.Term}) * into the corresponding list of VLog API model {@link Term}. * - * @param vLogTerms - * input terms array, to be converted to a list of corresponding - * {@link Term}s. + * @param vLogTerms input terms array, to be converted to a list of + * corresponding {@link Term}s. * @return list of {@link Term}s, where each element corresponds to the element * in given {@code vLogTerms} at the same position. */ @@ -74,8 +72,7 @@ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { * Converts an internal VLog term ({@link karmaresearch.vlog.Term}) to a VLog * API model {@link Term} of the same type and name. * - * @param vLogTerm - * term to be converted + * @param vLogTerm term to be converted * @return a ({@link karmaresearch.vlog.Term}) with the same name as given * {@code vLogTerm} and of the corresponding type. */ @@ -83,7 +80,11 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { String name = vLogTerm.getName(); switch (vLogTerm.getTermType()) { case CONSTANT: - return new ConstantImpl(name); + if (name.charAt(0) == '<' && name.charAt(name.length() - 1) == '>') { // strip <> off IRIs + return new ConstantImpl(name.substring(1, name.length() - 1)); + } else { + return new ConstantImpl(name); + } case BLANK: return new BlankImpl(name); case VARIABLE: From d46fded74c2c4a3953f9ddff4fc29a9493956456 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 12:22:11 +0200 Subject: [PATCH 0064/1003] Update to new parser support --- .../examples/core/AddDataFromRdfFile.java | 97 +++++++------------ 1 file changed, 33 insertions(+), 64 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 315e72e18..6a6a9f0f4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -1,8 +1,6 @@ package org.semanticweb.vlog4j.examples.core; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConjunction; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveConjunction; /*- * #%L @@ -26,7 +24,6 @@ import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; import java.io.File; @@ -35,7 +32,6 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; import 
org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -44,6 +40,8 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; +import org.semanticweb.vlog4j.syntax.parser.ParsingException; +import org.semanticweb.vlog4j.syntax.parser.RuleParser; /** * This example shows how facts can be imported from files in the RDF N-Triples @@ -71,69 +69,38 @@ public class AddDataFromRdfFile { public static void main(final String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - /* 1. Instantiating entities and rules. */ - final Predicate triplesEDB = makePredicate("triplesEDB", 3); - final Predicate triplesIDB = makePredicate("triplesIDB", 3); - - final Constant hasPartPredicate = makeConstant(""); - final Constant isPartOfPredicate = makeConstant(""); - final Constant hasTypePredicate = makeConstant(""); - final Constant bicycleObject = makeConstant(""); - final Constant wheelObject = makeConstant(""); - - final Variable x = makeVariable("x"); - final Variable s = makeVariable("s"); - final Variable p = makeVariable("p"); - final Variable o = makeVariable("o"); - - /* - * We will write <~/someName> instead of and - * <~#someName> instead of . - * - * triplesIDB(?s, ?p, ?o) :- triplesEDB(?s, ?p, ?o) . - */ - final PositiveLiteral factIDB = makePositiveLiteral(triplesIDB, s, p, o); - final PositiveLiteral factEDB = makePositiveLiteral(triplesEDB, s, p, o); - final Rule rule1 = makeRule(factIDB, factEDB); - - /* - * exists x. triplesIDB(?s, <~/hasPart>, !x), triplesIDB(!x, <~#type>, - * <~/wheel>) :- triplesIDB(?s, <~#type>, <~/bicycle>) . - */ - final PositiveLiteral existsHasPartIDB = makePositiveLiteral(triplesIDB, s, hasPartPredicate, x); - final PositiveLiteral existsWheelIDB = makePositiveLiteral(triplesIDB, x, hasTypePredicate, wheelObject); - final PositiveLiteral bicycleIDB = makePositiveLiteral(triplesIDB, s, hasTypePredicate, bicycleObject); - final Rule rule2 = makeRule(makePositiveConjunction(existsHasPartIDB, existsWheelIDB), - makeConjunction(bicycleIDB)); - - /* - * exists x. triplesIDB(?s, <~/isPartOf>, !x) :- triplesIDB(?s, <~#type>, - * <~/wheel>) . - */ - final PositiveLiteral existsIsPartOfIDB = makePositiveLiteral(triplesIDB, s, isPartOfPredicate, x); - final PositiveLiteral wheelIDB = makePositiveLiteral(triplesIDB, s, hasTypePredicate, wheelObject); - final Rule rule3 = makeRule(makePositiveConjunction(existsIsPartOfIDB), makeConjunction(wheelIDB)); - - /* - * triplesIDB(?s, <~/isPartOf>, ?o) :- triplesIDB(?o, <~/hasPart>, ?s) . - */ - final PositiveLiteral isPartOfIDB = makePositiveLiteral(triplesIDB, s, isPartOfPredicate, o); - final PositiveLiteral hasPartIDBReversed = makePositiveLiteral(triplesIDB, o, hasPartPredicate, s); - final Rule rule4 = makeRule(isPartOfIDB, hasPartIDBReversed); - - /* - * triplesIDB(?s, <~/hasPart>, ?o) :- triplesIDB(?o, <~/isPartOf>, ?s) . - */ - final PositiveLiteral hasPartIDB = makePositiveLiteral(triplesIDB, s, hasPartPredicate, o); - final PositiveLiteral isPartOfIDBReversed = makePositiveLiteral(triplesIDB, o, isPartOfPredicate, s); - final Rule rule5 = makeRule(hasPartIDB, isPartOfIDBReversed); + /* 1. 
Prepare rules and create some related vocabulary objects used later */ + final Predicate triplesEDB = makePredicate("triplesEDB", 3); // predicate to load RDF + final Predicate triplesIDB = makePredicate("triplesIDB", 3); // predicate for inferred triples + final Constant hasPartPredicate = makeConstant("https://example.org/hasPart"); // RDF property used in query + + final String rules = "%%%% We specify the rules syntactically for convenience %%%\n" + + "@prefix ex: ." + + "@prefix rdf: ." + // load all triples from file: + + "triplesIDB(?s, ?p, ?o) :- triplesEDB(?s, ?p, ?o) ." + // every bicycle has some part that is a wheel: + + "triplesIDB(?s, ex:hasPart, !x), triplesIDB(!x, rdf:type, ex:wheel) :- triplesIDB(?s, rdf:type, ex:bicycle) ." + // every wheel is part of some bicycle: + + "triplesIDB(?s, ex:isPartOf, !x) :- triplesIDB(?s, rdf:type, ex:wheel) ." + // hasPart and isPartOf are mutually inverse relations: + + "triplesIDB(?s, ex:isPartOf, ?o) :- triplesIDB(?o, ex:hasPart, ?s) ." + + "triplesIDB(?s, ex:hasPart, ?o) :- triplesIDB(?o, ex:isPartOf, ?s) ."; + + RuleParser ruleParser = new RuleParser(); + try { + ruleParser.parse(rules); + } catch (ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } /* * 2. Loading, reasoning, querying and exporting, while using try-with-resources * to close the reasoner automatically. */ try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addRules(rule1, rule2, rule3, rule4, rule5); + reasoner.addRules(ruleParser.getRules()); /* Importing {@code .nt.gz} file as data source. */ final DataSource triplesEDBDataSource = new RdfFileDataSource( @@ -142,13 +109,15 @@ public static void main(final String[] args) reasoner.load(); System.out.println("Before materialisation:"); - /* triplesEDB(?s, <~/hasPart>, ?o) */ - final PositiveLiteral hasPartEDB = makePositiveLiteral(triplesEDB, s, hasPartPredicate, o); + final Variable x = makeVariable("x"); + final Variable y = makeVariable("y"); + final PositiveLiteral hasPartEDB = makePositiveLiteral(triplesEDB, x, hasPartPredicate, y); ExamplesUtils.printOutQueryAnswers(hasPartEDB, reasoner); /* The reasoner will use the Restricted Chase by default. */ reasoner.reason(); System.out.println("After materialisation:"); + final PositiveLiteral hasPartIDB = makePositiveLiteral(triplesIDB, x, hasPartPredicate, y); ExamplesUtils.printOutQueryAnswers(hasPartIDB, reasoner); /* Exporting query answers to {@code .csv} files. 
*/ @@ -157,7 +126,7 @@ public static void main(final String[] args) reasoner.exportQueryAnswersToCsv(hasPartIDB, ExamplesUtils.OUTPUT_FOLDER + "ternaryHasPartIDBWithoutBlanks.csv", false); - final Constant redBikeSubject = makeConstant(""); + final Constant redBikeSubject = makeConstant("https://example.org/redBike"); final PositiveLiteral existsHasPartRedBike = makePositiveLiteral(triplesIDB, redBikeSubject, hasPartPredicate, x); reasoner.exportQueryAnswersToCsv(existsHasPartRedBike, From 1d6644fd8e8dfe9051a262961852c5210aeecc4d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 12:33:28 +0200 Subject: [PATCH 0065/1003] Adapted example to new parser code --- .../examples/core/AddDataFromCsvFile.java | 98 +++++++------------ 1 file changed, 33 insertions(+), 65 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index e52302d3d..61bb482b7 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -1,8 +1,6 @@ package org.semanticweb.vlog4j.examples.core; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConjunction; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveConjunction; /*- * #%L @@ -26,7 +24,6 @@ import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; import java.io.File; @@ -35,7 +32,6 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -44,6 +40,8 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; +import org.semanticweb.vlog4j.syntax.parser.ParsingException; +import org.semanticweb.vlog4j.syntax.parser.RuleParser; /** * This example shows how facts can be imported from files in the CSV format. @@ -67,73 +65,39 @@ public class AddDataFromCsvFile { public static void main(final String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - /* 1. Instantiating entities and rules. 
*/ - final Predicate bicycleIDB = makePredicate("BicycleIDB", 1); - final Predicate bicycleEDB = makePredicate("BicycleEDB", 1); - final Predicate wheelIDB = makePredicate("WheelIDB", 1); - final Predicate wheelEDB = makePredicate("WheelEDB", 1); - final Predicate hasPartIDB = makePredicate("HasPartIDB", 2); - final Predicate hasPartEDB = makePredicate("HasPartEDB", 2); - final Predicate isPartOfIDB = makePredicate("IsPartOfIDB", 2); - final Predicate isPartOfEDB = makePredicate("IsPartOfEDB", 2); - final Variable x = makeVariable("x"); - final Variable y = makeVariable("y"); - - /* - * BicycleIDB(?x) :- BicycleEDB(?x) . - */ - final PositiveLiteral bicycleIDBX = makePositiveLiteral(bicycleIDB, x); - final PositiveLiteral bicycleEDBX = makePositiveLiteral(bicycleEDB, x); - final Rule rule1 = makeRule(bicycleIDBX, bicycleEDBX); - - /* - * WheelIDB(?x) :- WheelEDB(?x) . - */ - final PositiveLiteral wheelIDBX = makePositiveLiteral(wheelIDB, x); - final PositiveLiteral wheelEDBX = makePositiveLiteral(wheelEDB, x); - final Rule rule2 = makeRule(wheelIDBX, wheelEDBX); - - /* - * hasPartIDB(?x, ?y) :- hasPartEDB(?x, ?y) . - */ - final PositiveLiteral hasPartIDBXY = makePositiveLiteral(hasPartIDB, x, y); - final PositiveLiteral hasPartEDBXY = makePositiveLiteral(hasPartEDB, x, y); - final Rule rule3 = makeRule(hasPartIDBXY, hasPartEDBXY); - - /* - * isPartOfIDB(?x, ?y) :- isPartOfEDB(?x, ?y) . - */ - final PositiveLiteral isPartOfIDBXY = makePositiveLiteral(isPartOfIDB, x, y); - final PositiveLiteral isPartOfEDBXY = makePositiveLiteral(isPartOfEDB, x, y); - final Rule rule4 = makeRule(isPartOfIDBXY, isPartOfEDBXY); - - /* - * exists y. HasPartIDB(?x, !y), WheelIDB(!y) :- BicycleIDB(?x) . - */ - final PositiveLiteral wheelIDBY = makePositiveLiteral(wheelIDB, y); - final Rule rule5 = makeRule(makePositiveConjunction(hasPartIDBXY, wheelIDBY), makeConjunction(bicycleIDBX)); - - /* - * exists y. IsPartOfIDB(?x, !y) :- WheelIDB(?x) . - */ - final Rule rule6 = makeRule(makePositiveConjunction(isPartOfIDBXY), makeConjunction(wheelIDBX)); - - /* IsPartOfIDB(?x, ?y) :- HasPartIDB(?y, ?x) . */ - final PositiveLiteral hasPartIDBYX = makePositiveLiteral(hasPartIDB, y, x); - final Rule rule7 = makeRule(isPartOfIDBXY, hasPartIDBYX); - - /* - * HasPartIDB(?x, ?y) :- IsPartOfIDB(?y, ?x) . - */ - final PositiveLiteral isPartOfIDBYX = makePositiveLiteral(isPartOfIDB, y, x); - final Rule rule8 = makeRule(hasPartIDBXY, isPartOfIDBYX); + /* 1. Prepare rules and create some related vocabulary objects used later. */ + final Predicate bicycleEDB = makePredicate("bicycleEDB", 1); + final Predicate wheelEDB = makePredicate("wheelEDB", 1); + final Predicate hasPartIDB = makePredicate("hasPartIDB", 2); + final Predicate hasPartEDB = makePredicate("hasPartEDB", 2); + + final String rules = "%%%% We specify the rules syntactically for convenience %%%\n" + // load all data from the file-based ("EDB") predicates: + + "bicycleIDB(?x) :- bicycleEDB(?x) ." // + + "wheelIDB(?x) :- wheelEDB(?x) ." // + + "hasPartIDB(?x, ?y) :- hasPartEDB(?x, ?y) ." // + + "isPartOfIDB(?x, ?y) :- isPartOfEDB(?x, ?y) ." + // every bicycle has some part that is a wheel: + + "hasPartIDB(?x, !y), wheelIDB(!y) :- bicycleIDB(?x) ." + // every wheel is part of some bicycle: + + "isPartOfIDB(?x, !y) :- wheelIDB(?x) ." + // hasPart and isPartOf are mutually inverse relations: + + "hasPartIDB(?x, ?y) :- isPartOfIDB(?y, ?x) ." 
+ "isPartOfIDB(?x, ?y) :- hasPartIDB(?y, ?x) ."; + + RuleParser ruleParser = new RuleParser(); + try { + ruleParser.parse(rules); + } catch (ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } /* * 2. Loading, reasoning, and querying while using try-with-resources to close * the reasoner automatically. */ try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); + reasoner.addRules(ruleParser.getRules()); /* Importing {@code .csv} files as data sources. */ final DataSource bicycleEDBDataSource = new CsvFileDataSource( @@ -148,11 +112,15 @@ public static void main(final String[] args) reasoner.load(); System.out.println("Before materialisation:"); + final Variable x = makeVariable("x"); + final Variable y = makeVariable("y"); + final PositiveLiteral hasPartEDBXY = makePositiveLiteral(hasPartEDB, x, y); ExamplesUtils.printOutQueryAnswers(hasPartEDBXY, reasoner); /* The reasoner will use the Restricted Chase by default. */ reasoner.reason(); System.out.println("After materialisation:"); + final PositiveLiteral hasPartIDBXY = makePositiveLiteral(hasPartIDB, x, y); ExamplesUtils.printOutQueryAnswers(hasPartIDBXY, reasoner); /* 3. Exporting query answers to {@code .csv} files. */ From 55eaec26def45bbe1cdc9eaa29322351e60f6613 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 12:33:34 +0200 Subject: [PATCH 0066/1003] typo --- .../semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 6a6a9f0f4..2853ec560 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -69,7 +69,7 @@ public class AddDataFromRdfFile { public static void main(final String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - /* 1. Prepare rules and create some related vocabulary objects used later */ + /* 1. Prepare rules and create some related vocabulary objects used later. 
*/ final Predicate triplesEDB = makePredicate("triplesEDB", 3); // predicate to load RDF final Predicate triplesIDB = makePredicate("triplesIDB", 3); // predicate for inferred triples final Constant hasPartPredicate = makeConstant("https://example.org/hasPart"); // RDF property used in query From 2facee9e9c2f8f79bec7a0495fefcc85c9179ebf Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 15 Aug 2019 12:51:14 +0200 Subject: [PATCH 0067/1003] add boolean literal support --- .../implementation/javacc/JavaCCRuleParser.jj | 35 ++++++++----------- .../vlog4j/syntax/parser/RuleParserTest.java | 9 +++++ 2 files changed, 24 insertions(+), 20 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index ddd39a6ba..d754117aa 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -28,13 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Constant; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeNegativeLiteral; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveConjunction; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConjunction; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; public class JavaCCRuleParser extends RuleParserBase @@ -115,7 +109,7 @@ Rule rule() throws PrefixDeclarationException: // 2 rule's body head = listOfPositiveLiterals(1) < ARROW > body = listOfLiterals(2) < DOT > { - rule = makeRule(makePositiveConjunction(head), makeConjunction(body)); + rule = Expressions.makeRule(Expressions.makePositiveConjunction(head), Expressions.makeConjunction(body)); // check that the intersection between headExiVars and BodyVars is empty for (String variable : headExiVars) { @@ -172,7 +166,7 @@ PositiveLiteral positiveLiteral(int itComesFrom) throws PrefixDeclarationExcepti } { predicateName = predicateName() < LPAREN > terms = listOfTerms(itComesFrom) < RPAREN > - { return makePositiveLiteral(predicateName, terms); } + { return Expressions.makePositiveLiteral(predicateName, terms); } } NegativeLiteral negativeLiteral(int itComesFrom) throws PrefixDeclarationException: @@ -182,7 +176,7 @@ NegativeLiteral negativeLiteral(int itComesFrom) throws PrefixDeclarationExcepti } { < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(itComesFrom) < RPAREN > - { return makeNegativeLiteral(predicateName, terms); } + { return Expressions.makeNegativeLiteral(predicateName, terms); } } List < Term > listOfTerms(int itComesFrom) throws PrefixDeclarationException: @@ -216,8 +210,9 @@ Term term(int itComesFrom) throws PrefixDeclarationException: Token t; } { - s = IRI(false) { return makeConstant(s); } -| s = RDFLiteral() { return makeConstant(s); } + s = IRI(false) { 
return Expressions.makeConstant(s); } +| LOOKAHEAD(booleanLiteral()) s = booleanLiteral() { return Expressions.makeConstant(s); } +| s = RDFLiteral() { return Expressions.makeConstant(s); } | t = < UNIVAR > { s = t.image.substring(1); @@ -225,7 +220,7 @@ Term term(int itComesFrom) throws PrefixDeclarationException: headUniVars.add(s); if (itComesFrom == 2) bodyVars.add(s); - return makeVariable(t.image.substring(1)); + return Expressions.makeVariable(t.image.substring(1)); } | t = < EXIVAR > { @@ -234,9 +229,9 @@ Term term(int itComesFrom) throws PrefixDeclarationException: headExiVars.add(s); if (itComesFrom == 2) throw new ParseException("Existentialy quantified variables can not appear in the body. Line: " + t.beginLine + ", Column: "+ t.beginColumn); - return makeVariable(t.image.substring(1)); + return Expressions.makeVariable(t.image.substring(1)); } -| t = < VARORPREDNAME > { return makeConstant(prefixDeclarations.absolutize(t.image));} +| t = < VARORPREDNAME > { return Expressions.makeConstant(prefixDeclarations.absolutize(t.image));} } /** [16] */ @@ -277,12 +272,12 @@ String Langtag() : } } -String BooleanLiteral() : +String booleanLiteral() : { } { - < TRUE > { return "true^^http://www.w3.org/2001/XMLSchema#boolean"; } -| < FALSE > { return "false^^http://www.w3.org/2001/XMLSchema#boolean"; } + < TRUE > { return "\"true\"^^"; } +| < FALSE > { return "\"false\"^^"; } } String String(): @@ -388,8 +383,8 @@ TOKEN : TOKEN [ IGNORE_CASE ] : { - < TRUE : "true" > -| < FALSE : "false" > + < TRUE : "'true'" > +| < FALSE : "'false'" > // ------------------------------------------------- | < INTEGER : ([ "-", "+" ])? < DIGITS > > | diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 7538a309b..4947196d6 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -170,6 +170,15 @@ public void testNoDollarVariables() throws ParsingException { ruleParser.parse(input); } + @Test + public void testBooleanLiteral() throws ParsingException { + String input = "p('true') ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"true\"^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); + assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); + } + @Test public void testStringLiteral() throws ParsingException { String input = "p(\"abc\") ."; From 571c061c31725b768394714c0e8ecac90e48dfb4 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 15 Aug 2019 12:52:40 +0200 Subject: [PATCH 0068/1003] test true and false boolean literals --- .../vlog4j/syntax/parser/RuleParserTest.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 4947196d6..bb9302c5e 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -171,7 +171,7 @@ public void testNoDollarVariables() throws ParsingException { } @Test - public void testBooleanLiteral() throws ParsingException { + public void 
testTrueBooleanLiteral() throws ParsingException { String input = "p('true') ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); @@ -179,6 +179,15 @@ public void testBooleanLiteral() throws ParsingException { assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); } + @Test + public void testFalseBooleanLiteral() throws ParsingException { + String input = "p('false') ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"false\"^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); + assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); + } + @Test public void testStringLiteral() throws ParsingException { String input = "p(\"abc\") ."; From 8c66637bc7c04336419a0091d8040633c6adb839 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 12:54:22 +0200 Subject: [PATCH 0069/1003] Style improvements, proper logging --- .../examples/core/AddDataFromCsvFile.java | 21 ++++++----- .../examples/core/AddDataFromRdfFile.java | 15 ++++---- .../core/AddDataFromSparqlQueryResults.java | 36 +++++++++---------- .../SkolemVsRestrictedChaseTermination.java | 6 ++++ 4 files changed, 44 insertions(+), 34 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 61bb482b7..8cdeb89c6 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -65,6 +65,8 @@ public class AddDataFromCsvFile { public static void main(final String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + ExamplesUtils.configureLogging(); + /* 1. Prepare rules and create some related vocabulary objects used later. */ final Predicate bicycleEDB = makePredicate("bicycleEDB", 1); final Predicate wheelEDB = makePredicate("wheelEDB", 1); @@ -73,16 +75,17 @@ public static void main(final String[] args) final String rules = "%%%% We specify the rules syntactically for convenience %%%\n" // load all data from the file-based ("EDB") predicates: - + "bicycleIDB(?x) :- bicycleEDB(?x) ." // - + "wheelIDB(?x) :- wheelEDB(?x) ." // - + "hasPartIDB(?x, ?y) :- hasPartEDB(?x, ?y) ." // - + "isPartOfIDB(?x, ?y) :- isPartOfEDB(?x, ?y) ." + + "bicycleIDB(?X) :- bicycleEDB(?X) ." // + + "wheelIDB(?X) :- wheelEDB(?X) ." // + + "hasPartIDB(?X, ?Y) :- hasPartEDB(?X, ?Y) ." // + + "isPartOfIDB(?X, ?Y) :- isPartOfEDB(?X, ?Y) ." // every bicycle has some part that is a wheel: - + "hasPartIDB(?x, !y), wheelIDB(!y) :- bicycleIDB(?x) ." + + "hasPartIDB(?X, !Y), wheelIDB(!Y) :- bicycleIDB(?X) ." // every wheel is part of some bicycle: - + "isPartOfIDB(?x, !y) :- wheelIDB(?x) ." + + "isPartOfIDB(?X, !Y) :- wheelIDB(?X) ." // hasPart and isPartOf are mutually inverse relations: - + "hasPartIDB(?x, ?y) :- isPartOfIDB(?y, ?x) ." + "isPartOfIDB(?x, ?y) :- hasPartIDB(?y, ?x) ."; + + "hasPartIDB(?X, ?Y) :- isPartOfIDB(?Y, ?X) ." 
// + + "isPartOfIDB(?X, ?Y) :- hasPartIDB(?Y, ?X) ."; RuleParser ruleParser = new RuleParser(); try { @@ -112,8 +115,8 @@ public static void main(final String[] args) reasoner.load(); System.out.println("Before materialisation:"); - final Variable x = makeVariable("x"); - final Variable y = makeVariable("y"); + final Variable x = makeVariable("X"); + final Variable y = makeVariable("Y"); final PositiveLiteral hasPartEDBXY = makePositiveLiteral(hasPartEDB, x, y); ExamplesUtils.printOutQueryAnswers(hasPartEDBXY, reasoner); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 2853ec560..3edec03f6 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -68,6 +68,7 @@ public class AddDataFromRdfFile { public static void main(final String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + ExamplesUtils.configureLogging(); /* 1. Prepare rules and create some related vocabulary objects used later. */ final Predicate triplesEDB = makePredicate("triplesEDB", 3); // predicate to load RDF @@ -78,14 +79,14 @@ public static void main(final String[] args) + "@prefix ex: ." + "@prefix rdf: ." // load all triples from file: - + "triplesIDB(?s, ?p, ?o) :- triplesEDB(?s, ?p, ?o) ." + + "triplesIDB(?S, ?P, ?O) :- triplesEDB(?S, ?P, ?O) ." // every bicycle has some part that is a wheel: - + "triplesIDB(?s, ex:hasPart, !x), triplesIDB(!x, rdf:type, ex:wheel) :- triplesIDB(?s, rdf:type, ex:bicycle) ." + + "triplesIDB(?S, ex:hasPart, !X), triplesIDB(!X, rdf:type, ex:wheel) :- triplesIDB(?S, rdf:type, ex:bicycle) ." // every wheel is part of some bicycle: - + "triplesIDB(?s, ex:isPartOf, !x) :- triplesIDB(?s, rdf:type, ex:wheel) ." + + "triplesIDB(?S, ex:isPartOf, !X) :- triplesIDB(?S, rdf:type, ex:wheel) ." // hasPart and isPartOf are mutually inverse relations: - + "triplesIDB(?s, ex:isPartOf, ?o) :- triplesIDB(?o, ex:hasPart, ?s) ." - + "triplesIDB(?s, ex:hasPart, ?o) :- triplesIDB(?o, ex:isPartOf, ?s) ."; + + "triplesIDB(?S, ex:isPartOf, ?O) :- triplesIDB(?O, ex:hasPart, ?S) ." 
+ + "triplesIDB(?S, ex:hasPart, ?O) :- triplesIDB(?O, ex:isPartOf, ?S) ."; RuleParser ruleParser = new RuleParser(); try { @@ -109,8 +110,8 @@ public static void main(final String[] args) reasoner.load(); System.out.println("Before materialisation:"); - final Variable x = makeVariable("x"); - final Variable y = makeVariable("y"); + final Variable x = makeVariable("X"); + final Variable y = makeVariable("Y"); final PositiveLiteral hasPartEDB = makePositiveLiteral(triplesEDB, x, hasPartPredicate, y); ExamplesUtils.printOutQueryAnswers(hasPartEDB, reasoner); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 8f1c38270..ca3beaff1 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -40,11 +40,12 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.examples.ExamplesUtils; /** * This is a simple example of adding data from the result of a SPARQL query on * a remote database endpoint, using {@link SparqlQueryResultDataSource}. In - * this example, we will query WikiData for titles of publications that have + * this example, we will query Wikidata for titles of publications that have * authors who have children together. * * @author Irina Dragoste @@ -77,6 +78,8 @@ public class AddDataFromSparqlQueryResults { public static void main(final String[] args) throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + ExamplesUtils.configureLogging(); + /* * The WikiData SPARQL query endpoint. */ @@ -105,7 +108,7 @@ public static void main(final String[] args) Arrays.asList(titleVariable, motherVariable, fatherVariable)); /* - * We query WikiData with the SPARQL query composed of the query variables and + * We query Wikidata with the SPARQL query composed of the query variables and * query body. The query result is a DataSource we will associate to a * predicate. */ @@ -117,31 +120,26 @@ public static void main(final String[] args) * same arity as the query variables size. In this case, we have 3 query * variables (title, mother and father). */ - final Predicate titleOfPublicationThatHasAuthorsWhoParentTheSameChild = Expressions - .makePredicate("publicationAndAuthorsWhoParentTheSameChild", 3); + final Predicate queryPredicate = Expressions.makePredicate("publicationParents", 3); try (Reasoner reasoner = Reasoner.getInstance()) { /* * The SPARQL query results will be added to the reasoner knowledge base, as - * facts associated to the predicate - * titleOfPublicationThatHasAuthorsWhoParentTheSameChild. + * facts associated to the predicate publicationParents. */ - reasoner.addFactsFromDataSource(titleOfPublicationThatHasAuthorsWhoParentTheSameChild, - sparqlQueryResultDataSource); - + reasoner.addFactsFromDataSource(queryPredicate, sparqlQueryResultDataSource); reasoner.load(); /* * We construct a query PositiveLiteral for the predicated associated to the * SPARQL query result. 
*/ - final PositiveLiteral query = Expressions.makePositiveLiteral( - titleOfPublicationThatHasAuthorsWhoParentTheSameChild, Expressions.makeVariable("x"), + final PositiveLiteral query = Expressions.makePositiveLiteral(queryPredicate, Expressions.makeVariable("x"), Expressions.makeVariable("y"), Expressions.makeVariable("z")); /* We query the reasoner for facts of the SPARQL query result predicate. */ - System.out.println("Titles of publications that have authors who parent the same child:"); + System.out.println("Titles of publications by co-authors who have a child together:"); try (QueryResultIterator queryResultIterator = reasoner.answerQuery(query, false)) { queryResultIterator.forEachRemaining(queryResult -> { final List queryResultTerms = queryResult.getTerms(); @@ -151,21 +149,23 @@ public static void main(final String[] args) }); } + /* + * To do some basic reasoning, we would now like to add the following rule that + * extracts (unique) mothers, fathers, and pairs from the queried data: + * haveChildrenTogether(?y, ?z), isMother(?y), isFather(?z) :- + * publicationParents(?x, ?y, ?z) . + */ final PositiveLiteral haveChildrenTogether = Expressions.makePositiveLiteral("haveChildrenTogether", Expressions.makeVariable("y"), Expressions.makeVariable("z")); final PositiveLiteral isMother = Expressions.makePositiveLiteral("isMother", Expressions.makeVariable("y")); final PositiveLiteral isFather = Expressions.makePositiveLiteral("isFather", Expressions.makeVariable("z")); final Conjunction ruleHeadConjunction = Expressions .makePositiveConjunction(haveChildrenTogether, isMother, isFather); - /* - * haveChildrenTogether(?y, ?z), isMother(?y), isFather(?z) :- - * publicationAndAuthorsWhoParentTheSameChild(?x, ?y, ?z) - */ final Rule rule = Expressions.makeRule(ruleHeadConjunction, Expressions.makeConjunction(query)); /* * We reset the reasoner in order to add the created rule, and reason on the - * data added from the WikiData SPARQL query result. + * data added from the Wikidata SPARQL query result. */ reasoner.resetReasoner(); reasoner.addRules(rule); @@ -173,7 +173,7 @@ public static void main(final String[] args) reasoner.reason(); /* We query the reasoner for facts of the haveChildrenTogether predicate. */ - System.out.println("Pairs of authors who have children together and wrote publications together:"); + System.out.println("Co-authors who have a child:"); try (QueryResultIterator queryResultIterator = reasoner.answerQuery(haveChildrenTogether, false)) { queryResultIterator.forEachRemaining(queryResult -> { final List queryResultTerms = queryResult.getTerms(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 2f4280fed..642c1c1a0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -40,6 +40,9 @@ * the Restricted Chase on the same set of rules and facts. Note that the * Restricted Chase is the default reasoning algorithm, as it terminates in most * cases and generates a smaller number of facts. + * + * @TODO Convert to use string-based rules and parse them, instead of building + * rules tediously in Java. 
* * @author Irina Dragoste * @@ -48,6 +51,9 @@ public class SkolemVsRestrictedChaseTermination { public static void main(final String[] args) throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + + ExamplesUtils.configureLogging(); + /* 1. Instantiating entities, rules and facts */ final Predicate bicycleIDB = Expressions.makePredicate("BicycleIDB", 1); final Predicate bicycleEDB = Expressions.makePredicate("BicycleEDB", 1); From 06ad02847f2111400598d795237c6870e06fc758 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 13:33:41 +0200 Subject: [PATCH 0070/1003] Updated test to new IRI handling --- .../src/test/data/input/ternaryFacts.nt | 4 +-- .../test/data/input/ternaryFactsZipped.nt.gz | Bin 62 -> 94 bytes .../LoadDataFromRdfFileTest.java | 26 +++++++++++++----- .../vlog/VLogDataFromRdfFileTest.java | 18 ++++++++---- 4 files changed, 33 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/test/data/input/ternaryFacts.nt b/vlog4j-core/src/test/data/input/ternaryFacts.nt index 42a4a191c..427f86fb9 100644 --- a/vlog4j-core/src/test/data/input/ternaryFacts.nt +++ b/vlog4j-core/src/test/data/input/ternaryFacts.nt @@ -1,2 +1,2 @@ -
    . - . \ No newline at end of file + . + "test string" . \ No newline at end of file diff --git a/vlog4j-core/src/test/data/input/ternaryFactsZipped.nt.gz b/vlog4j-core/src/test/data/input/ternaryFactsZipped.nt.gz index 7b7641230945b013855e959359ffe8cf07c71bcd..749fda951edad510536840d9577153650bc30d7c 100644 GIT binary patch literal 94 zcmV-k0HOaMiwFpKK2==+19W9_ZeenHMqy)gb1rUl0JF&`DJihh*H5iT%q_@C)ypqR z*H1RIQ?S7zUVvRV*~m^oj|-~|4ucBq6qHI*i%S%WONuh{(v=kS0EQ*r1BUoVuY_T> expectedTernaryQueryResult = Sets.newSet( - Arrays.asList(makeConstant(""), makeConstant(""), makeConstant("")), - Arrays.asList(makeConstant(""), makeConstant("
    "), makeConstant(""))); + Arrays.asList(makeConstant("http://example.org/c1"), makeConstant("http://example.org/p"), + makeConstant("http://example.org/c2")), + Arrays.asList(makeConstant("http://example.org/c1"), makeConstant("http://example.org/q"), + makeConstant("\"test string\"^^"))); @Test public void testLoadEmptyRdfFile() throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom, new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt"))); + } + + @Test + public void testLoadEmptyRdfFileGz() + throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom, new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt.gz"))); } @Test public void testLoadTernaryFactsFromRdfFile() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + EDBConfigurationException, IOException, IncompatiblePredicateArityException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt"))); + } + + @Test + public void testLoadTernaryFactsFromRdfFileGz() throws ReasonerStateException, EdbIdbSeparationException, + EDBConfigurationException, IOException, IncompatiblePredicateArityException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedNtFileRoot + ".nt.gz"))); } @@ -103,8 +115,8 @@ public void testLoadNonexistingRdfFile() @Test public void testLoadRdfInvalidFormat() throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final FileDataSource fileDataSource = new RdfFileDataSource( - new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt")); + final FileDataSource fileDataSource = new RdfFileDataSource(new File( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt")); try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.addFactsFromDataSource(ternaryPredicate, fileDataSource); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java index a87768dd2..a91046699 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java @@ -23,13 +23,16 @@ */ import static org.junit.Assert.assertTrue; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Set; import org.junit.Test; +import org.mockito.internal.util.collections.Sets; import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.AlreadyStartedException; @@ -47,10 +50,12 @@ public class VLogDataFromRdfFileTest { private static final String emptyTernaryPredicateName = "empty"; private static final List> expectedTernaryQueryResult = 
Arrays.asList( - Arrays.asList(VLogExpressions.makeConstant(""), VLogExpressions.makeConstant(""), - VLogExpressions.makeConstant("")), - Arrays.asList(VLogExpressions.makeConstant(""), VLogExpressions.makeConstant("
    "), - VLogExpressions.makeConstant(""))); + Arrays.asList(VLogExpressions.makeConstant(""), + VLogExpressions.makeConstant(""), + VLogExpressions.makeConstant("")), + Arrays.asList(VLogExpressions.makeConstant(""), + VLogExpressions.makeConstant(""), + VLogExpressions.makeConstant("\"test string\"^^"))); private static List> getTernaryQueryResults(final VLog vLog, final String predicateName) throws NotStartedException { @@ -63,13 +68,14 @@ private static List> getTernaryQueryResults(final VLog vLog, final St } @Test - public void testLoadDataFomRdfString() + public void testLoadDataFromRdfString() throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { final String ternaryPredicateEDBConfig = "EDB0_predname=" + unzippedTernaryPredicateName + "\n" + "EDB0_type=INMEMORY" + "\n" + "EDB0_param0=" + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB0_param1=" + FileDataSourceTestUtils.unzippedNtFileRoot + "\n" + "EDB1_predname=" + zippedTernaryPredicateName + "\n" + "EDB1_type=INMEMORY" + "\n" + "EDB1_param0=" - + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB1_param1=" + FileDataSourceTestUtils.zippedNtFileRoot; + + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB1_param1=" + + FileDataSourceTestUtils.zippedNtFileRoot; final VLog vLog = new VLog(); vLog.start(ternaryPredicateEDBConfig, false); From fefd0b86b3c9082a5cf1c3d0fd6c003a967b7548 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 13:36:53 +0200 Subject: [PATCH 0071/1003] remove unused imports --- .../vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java index a91046699..83f41c031 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java @@ -23,16 +23,13 @@ */ import static org.junit.Assert.assertTrue; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Set; import org.junit.Test; -import org.mockito.internal.util.collections.Sets; import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.AlreadyStartedException; From 6b0c812b6437b82d17dd4352798f526a3b3d2a47 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 15 Aug 2019 13:40:35 +0200 Subject: [PATCH 0072/1003] add XSD_DOUBLE --- .../org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java | 1 + 1 file changed, 1 insertion(+) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java index 05bb21d3f..277e93424 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java @@ -31,6 +31,7 @@ public interface PrefixDeclarations { static final String XSD = "http://www.w3.org/2001/XMLSchema#"; static final String XSD_STRING = "http://www.w3.org/2001/XMLSchema#string"; static final String XSD_DECIMAL = "http://www.w3.org/2001/XMLSchema#decimal"; + static 
final String XSD_DOUBLE = "http://www.w3.org/2001/XMLSchema#double"; static final String XSD_FLOAT = "http://www.w3.org/2001/XMLSchema#float"; static final String XSD_INTEGER = "http://www.w3.org/2001/XMLSchema#integer"; static final String XSD_BOOLEAN = "http://www.w3.org/2001/XMLSchema#boolean"; From 625fadc614785866368ccf695ec65288bf09da5d Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 15 Aug 2019 13:41:50 +0200 Subject: [PATCH 0073/1003] add support for numeric values --- .../implementation/javacc/JavaCCRuleParser.jj | 19 ++++++++------- .../vlog4j/syntax/parser/RuleParserBase.java | 24 +++++++++++-------- 2 files changed, 25 insertions(+), 18 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index d754117aa..7441689e7 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -206,12 +206,14 @@ String predicateName() throws PrefixDeclarationException: // 2 if the variable comes from the body of a rule Term term(int itComesFrom) throws PrefixDeclarationException: { - String s; Token t; + String s; + Constant c; } { s = IRI(false) { return Expressions.makeConstant(s); } -| LOOKAHEAD(booleanLiteral()) s = booleanLiteral() { return Expressions.makeConstant(s); } +| LOOKAHEAD(booleanLiteral()) c = booleanLiteral() { return c; } +| c = NumericLiteral() { return c; } | s = RDFLiteral() { return Expressions.makeConstant(s); } | t = < UNIVAR > { @@ -240,9 +242,9 @@ Constant NumericLiteral() : Token t; } { - t = < INTEGER > { return createLiteralInteger(t.image); } -| t = < DECIMAL > { return createLiteralDecimal(t.image); } -| t = < DOUBLE > { return createLiteralDouble(t.image); } + t = < INTEGER > { return createIntegerLiteral(t.image); } +| t = < DECIMAL > { return createDecimalLiteral(t.image); } +| t = < DOUBLE > { return createDoubleLiteral(t.image); } } String RDFLiteral() throws PrefixDeclarationException: @@ -272,12 +274,13 @@ String Langtag() : } } -String booleanLiteral() : +Constant booleanLiteral() : { + Token t; } { - < TRUE > { return "\"true\"^^"; } -| < FALSE > { return "\"false\"^^"; } + t = < TRUE > { return createBooleanLiteral(t.image); } +| t = < FALSE > { return createBooleanLiteral(t.image); } } String String(): diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java index d51063e58..a731f0f93 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java @@ -29,7 +29,7 @@ import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; import org.semanticweb.vlog4j.syntax.common.PrefixDeclarations; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; /** * Basic methods used in the JavaCC-generated parser. 
@@ -51,19 +51,23 @@ public class RuleParserBase { final protected List facts = new ArrayList<>(); final protected List queries = new ArrayList<>(); - protected Constant createLiteralInteger(String lexicalForm) { - // this method should be eliminated - return makeConstant(lexicalForm); + protected Constant createBooleanLiteral(String lexicalForm) { + // lexicalForm is one of ['true' or 'false'] + // we remove the quotes and add data type + lexicalForm = lexicalForm.substring(1,lexicalForm.length()-1); + return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_BOOLEAN + ">"); } - protected Constant createLiteralDouble(String lexicalForm) { - // this method should be eliminated - return makeConstant(lexicalForm); + protected Constant createIntegerLiteral(String lexicalForm) { + return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_INTEGER + ">"); } - protected Constant createLiteralDecimal(String lexicalForm) { - // this method should be eliminated - return makeConstant(lexicalForm); + protected Constant createDecimalLiteral(String lexicalForm) { + return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_DECIMAL + ">"); + } + + protected Constant createDoubleLiteral(String lexicalForm) { + return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_DOUBLE + ">"); } protected static String unescapeStr(String s) throws ParseException { From fbdbb253b2eb4b392f596eebfb3df18644266b05 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 15 Aug 2019 13:42:37 +0200 Subject: [PATCH 0074/1003] add tests for integer, decimal, and double values; fix boolean test error --- .../vlog4j/syntax/parser/RuleParserTest.java | 31 +++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index bb9302c5e..80ffd4f35 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -175,7 +175,7 @@ public void testTrueBooleanLiteral() throws ParsingException { String input = "p('true') ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"true\"^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); + PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("true^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); } @@ -184,7 +184,34 @@ public void testFalseBooleanLiteral() throws ParsingException { String input = "p('false') ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"false\"^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); + PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("false^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); + assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); + } + + @Test + public void testIntegerLiteral() throws ParsingException { + String input = "p(42) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("42^^<" 
+ PrefixDeclarations.XSD_INTEGER + ">")); + assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); + } + + @Test + public void testDecimalLiteral() throws ParsingException { + String input = "p(-5.0) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("-5.0^^<" + PrefixDeclarations.XSD_DECIMAL + ">")); + assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); + } + + @Test + public void testDoubleLiteral() throws ParsingException { + String input = "p(4.2E9) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("4.2E9^^<" + PrefixDeclarations.XSD_DOUBLE + ">")); assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); } From 6c01dbc1c2c88d945c8d120517503f9b217a3fc8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 13:47:01 +0200 Subject: [PATCH 0075/1003] Add method for creating datatype literal constants --- .../vlog4j/core/model/implementation/Expressions.java | 11 +++++++++++ .../implementation/LoadDataFromRdfFileTest.java | 3 ++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java index c266d3983..5293acdc5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java @@ -70,6 +70,17 @@ public static Constant makeConstant(String name) { return new ConstantImpl(name); } + /** + * Creates a {@link Constant} that represents a datatype literal. + * + * @param lexicalValue the lexical representation of the data value + * @param datatypeIri the full absolute IRI of the datatype of this literal + * @return a {@link Constant} corresponding to the input. + */ + public static Constant makeDatatypeLiteral(String lexicalValue, String datatypeIri) { + return new ConstantImpl("\"" + lexicalValue + "\"^^<" + datatypeIri + ">"); + } + /** * Creates a {@link Predicate}. 
* diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java index 03766dff5..00b047851 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java @@ -23,6 +23,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeDatatypeLiteral; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; import java.io.File; @@ -55,7 +56,7 @@ public class LoadDataFromRdfFileTest { Arrays.asList(makeConstant("http://example.org/c1"), makeConstant("http://example.org/p"), makeConstant("http://example.org/c2")), Arrays.asList(makeConstant("http://example.org/c1"), makeConstant("http://example.org/q"), - makeConstant("\"test string\"^^"))); + makeDatatypeLiteral("test string", "http://www.w3.org/2001/XMLSchema#string"))); @Test public void testLoadEmptyRdfFile() From 6a8fe49c83494ab1200afb1312954209cdea0507 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 14:31:10 +0200 Subject: [PATCH 0076/1003] Style typos --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 4 +++- .../org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index cdd2d2690..5e159ce19 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -127,7 +127,7 @@ public void addRules(final List rules) throws ReasonerStateException { } public List getRules() { - return Collections.unmodifiableList(this.rules); + return Collections.unmodifiableList(this.rules); } @Override @@ -304,12 +304,14 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla Validate.notNull(query, "Query atom must not be null!"); final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + TermQueryResultIterator stringQueryResultIterator; try { stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } + return new QueryResultIterator(stringQueryResultIterator); } diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java index 705408978..be8b3764f 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java +++ b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java @@ -63,12 +63,12 @@ static Term rdfLiteralToConstant(final Literal literal) { } /** - * Serializes the given {@code literal} to the the NTriples format for + * Serializes the given {@code literal} to the NTriples format for * {@link Literal}s, using a 
canonical representation. * * @param literal * @return a unique string representation of given {@code literal} in canonical - * form. + * form */ static String buildNormalizedStringValue(final Literal literal) { final URI datatype = literal.getDatatype(); From db3de3fe1e54d04046e12433515a3afdbda5f2cb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 14:31:45 +0200 Subject: [PATCH 0077/1003] Correctly convert terms in in-memory facts! --- .../core/reasoner/implementation/ModelToVLogConverter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java index 57c579671..298a8f24c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java @@ -81,8 +81,8 @@ static String[] toVLogFactTuple(final PositiveLiteral fact) { final String[] vLogFactTuple = new String[terms.size()]; int i = 0; for (final Term term : terms) { - final String vLogTupleTerm = term.getName(); - vLogFactTuple[i] = vLogTupleTerm; + final karmaresearch.vlog.Term vLogTupleTerm = toVLogTerm(term); + vLogFactTuple[i] = vLogTupleTerm.getName(); i++; } return vLogFactTuple; From e19505541a75a019fe8fb97bf3b87338c5318a3b Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 15 Aug 2019 15:46:51 +0200 Subject: [PATCH 0078/1003] add tests for numeric values with abbreviated and complete datatypes; rename positiveLiterals --- .../vlog4j/syntax/parser/RuleParserTest.java | 34 ++++++++++++++----- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 80ffd4f35..a998a4ff0 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -184,8 +184,8 @@ public void testFalseBooleanLiteral() throws ParsingException { String input = "p('false') ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("false^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); - assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); + PositiveLiteral falseLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("false^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); + assertEquals(Arrays.asList(falseLiteral), ruleParser.getFacts()); } @Test @@ -193,8 +193,26 @@ public void testIntegerLiteral() throws ParsingException { String input = "p(42) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("42^^<" + PrefixDeclarations.XSD_INTEGER + ">")); - assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); + PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("42^^<" + PrefixDeclarations.XSD_INTEGER + ">")); + assertEquals(Arrays.asList(integerLiteral), ruleParser.getFacts()); + } + + @Test + public void testAbbreviatedIntegerLiteral() throws ParsingException { + String input = 
"@prefix xsd: <" + PrefixDeclarations.XSD + "> . " + "p(\"42\"^^xsd:integer) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + ">")); + assertEquals(Arrays.asList(integerLiteral), ruleParser.getFacts()); + } + + @Test + public void testCompleteIntegerLiteral() throws ParsingException { + String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> ) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + ">")); + assertEquals(Arrays.asList(integerLiteral), ruleParser.getFacts()); } @Test @@ -202,8 +220,8 @@ public void testDecimalLiteral() throws ParsingException { String input = "p(-5.0) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("-5.0^^<" + PrefixDeclarations.XSD_DECIMAL + ">")); - assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); + PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("-5.0^^<" + PrefixDeclarations.XSD_DECIMAL + ">")); + assertEquals(Arrays.asList(decimalLiteral), ruleParser.getFacts()); } @Test @@ -211,8 +229,8 @@ public void testDoubleLiteral() throws ParsingException { String input = "p(4.2E9) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("4.2E9^^<" + PrefixDeclarations.XSD_DOUBLE + ">")); - assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); + PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("4.2E9^^<" + PrefixDeclarations.XSD_DOUBLE + ">")); + assertEquals(Arrays.asList(doubleLiteral), ruleParser.getFacts()); } @Test From 692935324a8a2f610332bdf3792060c9f4e245ca Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 17:15:59 +0200 Subject: [PATCH 0079/1003] Adjust string literal representation in RDF + method rename --- .../model/implementation/Expressions.java | 7 ++- .../LoadDataFromRdfFileTest.java | 4 +- .../vlog4j/rdf/RdfValueToTermConverter.java | 5 +- .../vlog4j/rdf/TestConvertRdfFileToFacts.java | 51 +++++++++---------- 4 files changed, 34 insertions(+), 33 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java index 5293acdc5..ed26eb363 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java @@ -73,11 +73,16 @@ public static Constant makeConstant(String name) { /** * Creates a {@link Constant} that represents a datatype literal. * + * Note that datatype literal is the common name of the representation of + * specific values for a datatype. We mostly avoid this meaning of + * literal since a literal in logic is typically a negated or non-negated + * atom. + * * @param lexicalValue the lexical representation of the data value * @param datatypeIri the full absolute IRI of the datatype of this literal * @return a {@link Constant} corresponding to the input. 
*/ - public static Constant makeDatatypeLiteral(String lexicalValue, String datatypeIri) { + public static Constant makeDatatypeConstant(String lexicalValue, String datatypeIri) { return new ConstantImpl("\"" + lexicalValue + "\"^^<" + datatypeIri + ">"); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java index 00b047851..3dfd0c47d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java @@ -23,7 +23,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeDatatypeLiteral; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeDatatypeConstant; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; import java.io.File; @@ -56,7 +56,7 @@ public class LoadDataFromRdfFileTest { Arrays.asList(makeConstant("http://example.org/c1"), makeConstant("http://example.org/p"), makeConstant("http://example.org/c2")), Arrays.asList(makeConstant("http://example.org/c1"), makeConstant("http://example.org/q"), - makeDatatypeLiteral("test string", "http://www.w3.org/2001/XMLSchema#string"))); + makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); @Test public void testLoadEmptyRdfFile() diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java index be8b3764f..c1db28854 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java +++ b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java @@ -86,10 +86,11 @@ static String buildNormalizedStringValue(final Literal literal) { sb.append("@"); sb.append(literal.getLanguage()); } else { - if (datatype != null) { - // Append the literal's datatype + if (datatype != null) { // Append the literal's datatype sb.append("^^"); sb.append(NTriplesUtil.toNTriplesString(datatype)); + } else { // Default to string for untyped literals: + sb.append("^^"); } } diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java index 11ab69938..ffeeec456 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java @@ -23,6 +23,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeDatatypeConstant; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; import static org.semanticweb.vlog4j.rdf.RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; import static org.semanticweb.vlog4j.rdf.RdfTestUtils.RDF_FIRST; @@ -54,33 +55,33 @@ public class TestConvertRdfFileToFacts { private static final Set 
expectedNormalizedPositiveLiterals = new HashSet<>(Arrays.asList( makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/1"), makeConstant("file:/a"), - makeConstant(intoLexical("-1", "integer"))), + makeDatatypeConstant("-1", "http://www.w3.org/2001/XMLSchema#integer")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/2"), makeConstant("file:/a"), - makeConstant(intoLexical("1", "integer"))), + makeDatatypeConstant("1", "http://www.w3.org/2001/XMLSchema#integer")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/3"), makeConstant("file:/a"), - makeConstant(intoLexical("-1.0", "decimal"))), + makeDatatypeConstant("-1.0", "http://www.w3.org/2001/XMLSchema#decimal")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/4"), makeConstant("file:/a"), - makeConstant(intoLexical("1.0", "decimal"))), + makeDatatypeConstant("1.0", "http://www.w3.org/2001/XMLSchema#decimal")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/5"), makeConstant("file:/a"), - makeConstant(intoLexical("-1.1E1", "double"))), + makeDatatypeConstant("-1.1E1", "http://www.w3.org/2001/XMLSchema#double")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/6"), makeConstant("file:/a"), - makeConstant(intoLexical("1.1E1", "double"))), + makeDatatypeConstant("1.1E1", "http://www.w3.org/2001/XMLSchema#double")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/7"), makeConstant("file:/a"), - makeConstant(intoLexical("true", "boolean"))))); + makeDatatypeConstant("true", "http://www.w3.org/2001/XMLSchema#boolean")))); private static final Set expectedLiteralPositiveLiterals = new HashSet<>(Arrays.asList( makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/1"), makeConstant("file:/a"), - makeConstant(intoLexical("1", "integer"))), + makeDatatypeConstant("1", "http://www.w3.org/2001/XMLSchema#integer")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/2"), makeConstant("file:/a"), - makeConstant(intoLexical("1.0", "decimal"))), + makeDatatypeConstant("1.0", "http://www.w3.org/2001/XMLSchema#decimal")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/3"), makeConstant("file:/a"), - makeConstant(intoLexical("1.0E1", "double"))), + makeDatatypeConstant("1.0E1", "http://www.w3.org/2001/XMLSchema#double")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/4"), makeConstant("file:/a"), - makeConstant(intoLexical("true", "boolean"))), + makeDatatypeConstant("true", "http://www.w3.org/2001/XMLSchema#boolean")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/5"), makeConstant("file:/a"), - makeConstant(intoLexical("false", "boolean"))), + makeDatatypeConstant("false", "http://www.w3.org/2001/XMLSchema#boolean")), makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/6"), makeConstant("file:/a"), - makeConstant("\"test string\"")))); + makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string")))); private static final Set expectedRelativeUriPositiveLiterals = new HashSet<>(Arrays.asList( makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("http://example.org/1"), @@ -92,7 +93,8 @@ public class TestConvertRdfFileToFacts { private static final Set expectedEscapedCharacterPositiveLiterals = new HashSet<>( Arrays.asList(makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/1"), - makeConstant("file:/a"), makeConstant("\"\\t\\u0008\\n\\r\\u000C\\\"'\\\\\"")))); + makeConstant("file:/a"), 
makeDatatypeConstant("\\t\\u0008\\n\\r\\u000C\\\"'\\\\", + "http://www.w3.org/2001/XMLSchema#string")))); private static final Set expectedLanguageTagPositiveLiterals = new HashSet<>(Arrays.asList( makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, makeConstant("file:/1"), makeConstant("file:/a"), @@ -104,8 +106,7 @@ public class TestConvertRdfFileToFacts { public void testDataTypesNormalized() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils .parseFile(new File(RdfTestUtils.INPUT_FOLDER + "unnormalizedLiteralValues.ttl"), RDFFormat.TURTLE); - final Set PositiveLiteralsFromModel = RdfModelConverter - .rdfModelToPositiveLiterals(model); + final Set PositiveLiteralsFromModel = RdfModelConverter.rdfModelToPositiveLiterals(model); assertEquals(expectedNormalizedPositiveLiterals, PositiveLiteralsFromModel); } @@ -113,8 +114,7 @@ public void testDataTypesNormalized() throws RDFHandlerException, RDFParseExcept public void testLiteralValuesPreserved() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "literalValues.ttl"), RDFFormat.TURTLE); - final Set PositiveLiteralsFromModel = RdfModelConverter - .rdfModelToPositiveLiterals(model); + final Set PositiveLiteralsFromModel = RdfModelConverter.rdfModelToPositiveLiterals(model); assertEquals(expectedLiteralPositiveLiterals, PositiveLiteralsFromModel); } @@ -122,8 +122,7 @@ public void testLiteralValuesPreserved() throws RDFHandlerException, RDFParseExc public void testRelativeURIsMadeAbsolute() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "relativeURIs.ttl"), RDFFormat.TURTLE); - final Set PositiveLiteralsFromModel = RdfModelConverter - .rdfModelToPositiveLiterals(model); + final Set PositiveLiteralsFromModel = RdfModelConverter.rdfModelToPositiveLiterals(model); assertEquals(expectedRelativeUriPositiveLiterals, PositiveLiteralsFromModel); } @@ -131,8 +130,7 @@ public void testRelativeURIsMadeAbsolute() throws RDFHandlerException, RDFParseE public void testEscapedCharactersPreserved() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "escapedCharacters.ttl"), RDFFormat.TURTLE); - final Set PositiveLiteralsFromModel = RdfModelConverter - .rdfModelToPositiveLiterals(model); + final Set PositiveLiteralsFromModel = RdfModelConverter.rdfModelToPositiveLiterals(model); assertEquals(expectedEscapedCharacterPositiveLiterals, PositiveLiteralsFromModel); } @@ -140,8 +138,7 @@ public void testEscapedCharactersPreserved() throws RDFHandlerException, RDFPars public void testLanguageTagsPreserved() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "languageTags.ttl"), RDFFormat.TURTLE); - final Set PositiveLiteralsFromModel = RdfModelConverter - .rdfModelToPositiveLiterals(model); + final Set PositiveLiteralsFromModel = RdfModelConverter.rdfModelToPositiveLiterals(model); assertEquals(expectedLanguageTagPositiveLiterals, PositiveLiteralsFromModel); } @@ -149,8 +146,7 @@ public void testLanguageTagsPreserved() throws RDFHandlerException, RDFParseExce public void testCollectionsPreserved() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "collections.ttl"), 
RDFFormat.TURTLE); - final Set PositiveLiteralsFromModel = RdfModelConverter - .rdfModelToPositiveLiterals(model); + final Set PositiveLiteralsFromModel = RdfModelConverter.rdfModelToPositiveLiterals(model); final Term blank1 = RdfTestUtils.getObjectOfFirstMatchedTriple(makeConstant("file:/2"), makeConstant("file:/a"), PositiveLiteralsFromModel); @@ -200,8 +196,7 @@ public void testBlankNodesWithSameLabelAreDifferentInDifferentModels() private Set getBlanksFromTurtleFile(final File file) throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(file, RDFFormat.TURTLE); - final Set PositiveLiterals = RdfModelConverter - .rdfModelToPositiveLiterals(model); + final Set PositiveLiterals = RdfModelConverter.rdfModelToPositiveLiterals(model); final Set blanks = new HashSet<>(); PositiveLiterals.forEach(positiveLiteral -> blanks.addAll(positiveLiteral.getBlanks())); From 035a6f80d7353fccb791d88437e79c279e3227d2 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 17:23:15 +0200 Subject: [PATCH 0080/1003] Modernised example code --- .../examples/rdf/AddDataFromRdfModel.java | 74 ++++++++----------- 1 file changed, 29 insertions(+), 45 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index 032e7af18..75553b6c8 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -29,7 +29,6 @@ import java.net.URL; import java.util.Set; -import org.eclipse.jdt.annotation.NonNull; import org.openrdf.model.Model; import org.openrdf.model.impl.LinkedHashModel; import org.openrdf.rio.RDFFormat; @@ -41,7 +40,6 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -51,6 +49,8 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.rdf.RdfModelConverter; +import org.semanticweb.vlog4j.syntax.parser.ParsingException; +import org.semanticweb.vlog4j.syntax.parser.RuleParser; /** * This example shows how vlog4j-rdf library's utility class @@ -65,6 +65,8 @@ public class AddDataFromRdfModel { public static void main(final String[] args) throws IOException, RDFParseException, RDFHandlerException, URISyntaxException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + ExamplesUtils.configureLogging(); + /* * Local file containing metadata of publications from ISWC'16 conference, in * RDF/XML format. @@ -72,15 +74,15 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti final File rdfXMLResourceFile = new File(ExamplesUtils.INPUT_FOLDER + "rdf/iswc-2016-complete-alignments.rdf"); final FileInputStream inputStreamISWC2016 = new FileInputStream(rdfXMLResourceFile); /* An RDF Model is obtained from parsing the RDF/XML resource. 
*/ - final Model rdfModelISWC2016 = parseRDFResource(inputStreamISWC2016, rdfXMLResourceFile.toURI(), + final Model rdfModelISWC2016 = parseRdfResource(inputStreamISWC2016, rdfXMLResourceFile.toURI(), RDFFormat.RDFXML); - + /* * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having * the ternary predicate "TRIPLE". */ final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToPositiveLiterals(rdfModelISWC2016); - System.out.println("Example triple fact from iswc-2016"); + System.out.println("Example triple fact from iswc-2016 dataset:"); System.out.println(" - " + tripleFactsISWC2016.iterator().next()); /* @@ -91,7 +93,7 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti "http://www.scholarlydata.org/dumps/conferences/alignments/iswc-2017-complete-alignments.ttl"); final InputStream inputStreamISWC2017 = turtleResourceURL.openStream(); /* An RDF Model is obtained from parsing the TURTLE resource. */ - final Model rdfModelISWC2017 = parseRDFResource(inputStreamISWC2017, turtleResourceURL.toURI(), + final Model rdfModelISWC2017 = parseRdfResource(inputStreamISWC2017, turtleResourceURL.toURI(), RDFFormat.TURTLE); /* @@ -99,7 +101,7 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti * the ternary predicate "TRIPLE". */ final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToPositiveLiterals(rdfModelISWC2017); - System.out.println("Example triple fact from iswc-2017"); + System.out.println("Example triple fact from iswc-2017 dataset:"); System.out.println(" - " + tripleFactsISWC2017.iterator().next()); /** @@ -109,41 +111,24 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti */ /* Predicate names of the triples found in both RDF files. */ - final Constant constHasAffiiation = Expressions - .makeConstant("https://w3id.org/scholarlydata/ontology/conference-ontology.owl#hasAffiliation"); - final Constant constWithOrganization = Expressions - .makeConstant("https://w3id.org/scholarlydata/ontology/conference-ontology.owl#withOrganisation"); - final Constant constName = Expressions - .makeConstant("https://w3id.org/scholarlydata/ontology/conference-ontology.owl#name"); - - final Variable varOganization = Expressions.makeVariable("organization"); - final Variable varOganizationName = Expressions.makeVariable("organizationName"); final Variable varPerson = Expressions.makeVariable("person"); - final Variable varAfiliation = Expressions.makeVariable("affiliation"); - - /* Patterns for facts extracted from RDF triples. */ - final PositiveLiteral personHasAffiliation = Expressions.makePositiveLiteral( - RdfModelConverter.RDF_TRIPLE_PREDICATE, varPerson, constHasAffiiation, varAfiliation); - final PositiveLiteral affiliationWithOrganization = Expressions.makePositiveLiteral( - RdfModelConverter.RDF_TRIPLE_PREDICATE, varAfiliation, constWithOrganization, varOganization); - final PositiveLiteral organizationHasName = Expressions.makePositiveLiteral( - RdfModelConverter.RDF_TRIPLE_PREDICATE, varOganization, constName, varOganizationName); - - /* - * We create a Rule that retrieves pairs of persons and their organization name, - * from facts extracted from RDF triples. 
- */ final Predicate predicateHasOrganizationName = Expressions.makePredicate("hasOrganizationName", 2); - final PositiveLiteral creatorOrganizationName = Expressions.makePositiveLiteral(predicateHasOrganizationName, - varPerson, varOganizationName); /* - * hasOrganizationName(?person, ?organizationName) :- TRIPLE(?person, - * , ?affiliation), TRIPLE(?affiliation, , - * ?organization), TRIPLE(?organization, , ?organizationName) . + * Rule that retrieves pairs of persons and their organization name: */ - final Rule organizationRule = Expressions.makeRule(creatorOrganizationName, personHasAffiliation, - affiliationWithOrganization, organizationHasName); + final String rules = "%%%% We specify the rules syntactically for convenience %%%\n" + + "@prefix cnf: ." + + "hasOrganizationName(?Person, ?OrgName) :- " + + " TRIPLE(?Person, cnf:hasAffiliation, ?Aff), TRIPLE(?Aff, cnf:withOrganisation, ?Org)," + + " TRIPLE(?Org, cnf:name, ?OrgName) ."; + RuleParser ruleParser = new RuleParser(); + try { + ruleParser.parse(rules); + } catch (ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } try (final Reasoner reasoner = Reasoner.getInstance();) { /* @@ -156,20 +141,20 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti * The rule that maps people to their organization name based on facts extracted * from RDF triples is added to the Reasoner's knowledge base. */ - reasoner.addRules(organizationRule); + reasoner.addRules(ruleParser.getRules()); reasoner.load(); reasoner.reason(); /* We query for persons whose organization name is "TU Dresden" . */ - final Constant constantTuDresdenOrganization = Expressions.makeConstant("\"TU Dresden\""); + final Constant constantTuDresden = Expressions.makeDatatypeConstant("TU Dresden", + "http://www.w3.org/2001/XMLSchema#string"); /* hasOrganizationName(?person, "TU Dresden") */ - @NonNull final PositiveLiteral queryTUDresdenParticipantsAtISWC = Expressions - .makePositiveLiteral(predicateHasOrganizationName, varPerson, constantTuDresdenOrganization); + .makePositiveLiteral(predicateHasOrganizationName, varPerson, constantTuDresden); - System.out.println("Participants at ISWC'16 and '17 from Organization 'TU Dresden':"); - System.out.println("( Answers to query " + queryTUDresdenParticipantsAtISWC + " )"); + System.out.println("\nParticipants at ISWC'16 and '17 from Organization 'TU Dresden':"); + System.out.println("(Answers to query " + queryTUDresdenParticipantsAtISWC + ")\n"); try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryTUDresdenParticipantsAtISWC, false)) { queryResultIterator.forEachRemaining(answer -> System.out @@ -198,10 +183,9 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti * @throws RDFHandlerException If the configured statement handler has * encountered an unrecoverable error. 
*/ - private static Model parseRDFResource(final InputStream inputStream, final URI baseURI, final RDFFormat rdfFormat) + private static Model parseRdfResource(final InputStream inputStream, final URI baseURI, final RDFFormat rdfFormat) throws IOException, RDFParseException, RDFHandlerException { final Model model = new LinkedHashModel(); - final RDFParser rdfParser = Rio.createParser(rdfFormat); rdfParser.setRDFHandler(new StatementCollector(model)); rdfParser.parse(inputStream, baseURI.toString()); From 01148c6767e0a899efac7e357927b22225adf579 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 15 Aug 2019 17:30:28 +0200 Subject: [PATCH 0081/1003] Remove info logs about set method calls --- .../vlog4j/syntax/parser/LocalPrefixDeclarations.java | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java index 425c6469c..20488eabb 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java @@ -26,8 +26,6 @@ import org.semanticweb.vlog4j.syntax.common.PrefixDeclarations; import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Implementation of {@link PrefixDeclarations} that is used when parsing data @@ -39,8 +37,6 @@ */ final public class LocalPrefixDeclarations implements PrefixDeclarations { - final static Logger logger = LoggerFactory.getLogger(LocalPrefixDeclarations.class.getName()); - Map prefixes = new HashMap<>(); String baseUri; @@ -64,7 +60,6 @@ public void setPrefix(String prefix, String uri) throws PrefixDeclarationExcepti + ">. 
It cannot be redefined to mean <" + uri + ">."); } - logger.info("Setting new prefix: " + prefix + ", " + uri); prefixes.put(prefix, uri); } @@ -72,7 +67,6 @@ public void setBase(String baseUri) throws PrefixDeclarationException { if (this.baseUri != null) throw new PrefixDeclarationException( "Base is already defined as <" + this.baseUri + "> and cannot be re-defined as " + baseUri); - logger.info("Setting base URI: " + baseUri); this.baseUri = baseUri; } @@ -80,7 +74,7 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE // from the parser we know that prefixedName is of the form: // prefix:something // remember that the prefixes are stored with the colon symbol - // This does not return the surrounding angle brackes <> + // This does not return the surrounding angle brackets <> int idx = prefixedName.indexOf(":") + 1; String prefix = prefixedName.substring(0, idx); From 7730081e9e757a4704ccba0c85378d8b5d9565a4 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 15 Aug 2019 19:07:59 +0200 Subject: [PATCH 0082/1003] add tests for unicode suport --- .../vlog4j/syntax/parser/RuleParserTest.java | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index a998a4ff0..5458e6754 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -207,7 +207,7 @@ public void testAbbreviatedIntegerLiteral() throws ParsingException { } @Test - public void testCompleteIntegerLiteral() throws ParsingException { + public void testFullIntegerLiteral() throws ParsingException { String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> ) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); @@ -269,6 +269,22 @@ public void testFullLiteral() throws ParsingException { assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); } + @Test + public void testUnicodeLiteral() throws ParsingException { + String input = "p(\"\u0061\u0062\u0063\") ." ; //"abc" + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); + } + + @Test + public void testUnicodeUri() throws ParsingException { + String input = "@base . @prefix ex: . ex:\u0073(c) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + } + @Test public void testPrefixedLiteral() throws ParsingException { String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . 
" + "p(\"abc\"^^xsd:string) ."; From 195abfd5c3e000799ec0a08b7088336d29b8d483 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 15 Aug 2019 22:41:53 +0200 Subject: [PATCH 0083/1003] change JAVA_UNICODE_ESCAPE and UNICODE_INPUT options for javacc --- .../parser/implementation/javacc/.gitignore | 1 + .../implementation/javacc/JavaCCRuleParser.jj | 16 ++++++++-------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore index 3243e7e57..1b4476464 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore @@ -5,3 +5,4 @@ /TokenMgrError.java /JavaCCRuleParser.java /JavaCCRuleParserTokenManager.java +/JavaCharStream.java diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index 7441689e7..989b17b6d 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -1,13 +1,13 @@ options { - // Use \ u escapes in streams AND use a reader for the query - // => get both raw and escaped unicode - //JAVA_UNICODE_ESCAPE = true; - //UNICODE_INPUT = false; - UNICODE_INPUT = true; - STATIC = false; - //DEBUG_PARSER = true; - //DEBUG_TOKEN_MANAGER = true ; + // Use \ u escapes in streams AND use a reader for the query + // => get both raw and escaped unicode + JAVA_UNICODE_ESCAPE = true; + UNICODE_INPUT = false; + + STATIC = false; + // DEBUG_PARSER = true; + // DEBUG_TOKEN_MANAGER = true ; } PARSER_BEGIN(JavaCCRuleParser) From 5b1d5740de22e0230ad3a0db04bce2a5713881f6 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 15 Aug 2019 22:43:44 +0200 Subject: [PATCH 0084/1003] fix bug in tests for unicode support --- .../org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 5458e6754..8fbdec575 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -271,7 +271,7 @@ public void testFullLiteral() throws ParsingException { @Test public void testUnicodeLiteral() throws ParsingException { - String input = "p(\"\u0061\u0062\u0063\") ." ; //"abc" + String input = "p(\"\\u0061\\u0062\\u0063\") ." ; //"abc" RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); @@ -279,7 +279,7 @@ public void testUnicodeLiteral() throws ParsingException { @Test public void testUnicodeUri() throws ParsingException { - String input = "@base . @prefix ex: . ex:\u0073(c) ."; + String input = "@base . @prefix ex: . 
ex:\\u0073(c) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); assertEquals(Arrays.asList(fact), ruleParser.getFacts()); From 8f74764e07c4bcaf2d1f783f1c409dbab11f1947 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 16 Aug 2019 10:43:30 +0200 Subject: [PATCH 0085/1003] add test for escapes --- .../vlog4j/syntax/parser/RuleParserTest.java | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 8fbdec575..681213a93 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -251,6 +251,17 @@ public void testStringLiteralEscapes() throws ParsingException { assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } + @Test + public void testStringLiteralAllEscapes() throws ParsingException { + // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") + String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\") ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("\"_\n_\t_\r_\b_\f_\'_\"_\\_\"^^<" + PrefixDeclarations.XSD_STRING + ">")); + assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + } + @Test public void testStringLiteralMultiLine() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''') ."; // User input: p("a\"b\\c") From b469ee2117a135057bbf2a0fddf9c7df0e6da775 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 16 Aug 2019 10:45:58 +0200 Subject: [PATCH 0086/1003] remove suport for queries --- .../implementation/javacc/JavaCCRuleParser.jj | 13 +++++++------ .../vlog4j/syntax/parser/RuleParser.java | 4 ---- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index 989b17b6d..99fffc906 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -88,10 +88,12 @@ void statement() throws PrefixDeclarationException: LOOKAHEAD(rule()) r = rule() { rules.add(r);} | l = positiveLiteral(0) < DOT > //not from a rule { - if (l.getVariables().isEmpty()) - facts.add(l); - else - queries.add(l); + if (l.getVariables().isEmpty()) { + facts.add(l); + } else { + throw new ParseException("Queries are not supported: " + l.toString()); + } + } } @@ -601,8 +603,7 @@ TOKEN : < PN_CHARS_U > | [ "0"-"9" ] | "\u00b7" - | - [ "\u0300"-"\u036f" ] + | [ "\u0300"-"\u036f" ] | [ "\u203f"-"\u2040" ] )* > } diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java index 087fe19c0..bc7082bea 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java @@ -68,10 +68,6 @@ public List getRules() { return parser.getRules(); } - public List getQueries() { - return parser.getQueries(); - } - public List getFacts() { return 
parser.getFacts(); } From 3891845384768c47ce94748b9c23cfe521c65331 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 16 Aug 2019 10:46:13 +0200 Subject: [PATCH 0087/1003] remove uncalled functions --- .../vlog4j/syntax/parser/RuleParserBase.java | 184 ------------------ 1 file changed, 184 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java index a731f0f93..378020c46 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java @@ -70,10 +70,6 @@ protected Constant createDoubleLiteral(String lexicalForm) { return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_DOUBLE + ">"); } - protected static String unescapeStr(String s) throws ParseException { - return unescape(s, '\\', false, 1, 1); - } - protected static String unescapeStr(String s, int line, int column) throws ParseException { return unescape(s, '\\', false, line, column); } @@ -114,41 +110,6 @@ protected static String unescape(String s, char escape, boolean pointCodeOnly, i column = column + 1; i = i + 1; - // \\u and \\U - if (ch2 == 'u') { - // i points to the \ so i+6 is next character - if (i + 4 >= s.length()) - throw new ParseException("\\u escape too short, line:" + line + ", column: " + column); - int x = hex(s, i + 1, 4, line, column); - sb.append((char) x); - // Jump 1 2 3 4 -- already skipped \ and u - i = i + 4; - column = column + 4; - continue; - } - if (ch2 == 'U') { - // i points to the \ so i+6 is next character - if (i + 8 >= s.length()) - throw new ParseException("\\U escape too short, line:" + line + ", column: " + column); - int x = hex(s, i + 1, 8, line, column); - // Convert to UTF-16 codepoint pair. - sb.append((char) x); - // Jump 1 2 3 4 5 6 7 8 -- already skipped \ and u - i = i + 8; - column = column + 8; - continue; - } - - // Are we doing just point code escapes? - // If so, \X-anything else is legal as a literal "\" and "X" - - if (pointCodeOnly) { - sb.append('\\'); - sb.append(ch2); - i = i + 1; - continue; - } - // Not just codepoints. Must be a legal escape. char ch3 = 0; switch (ch2) { @@ -184,80 +145,6 @@ protected static String unescape(String s, char escape, boolean pointCodeOnly, i return sb.toString(); } - // Line and column that started the escape - protected static int hex(String s, int i, int len, int line, int column) throws ParseException { -// if ( i+len >= s.length() ) -// { -// -// } - int x = 0; - for (int j = i; j < i + len; j++) { - char ch = s.charAt(j); - column++; - int k = 0; - switch (ch) { - case '0': - k = 0; - break; - case '1': - k = 1; - break; - case '2': - k = 2; - break; - case '3': - k = 3; - break; - case '4': - k = 4; - break; - case '5': - k = 5; - break; - case '6': - k = 6; - break; - case '7': - k = 7; - break; - case '8': - k = 8; - break; - case '9': - k = 9; - break; - case 'A': - case 'a': - k = 10; - break; - case 'B': - case 'b': - k = 11; - break; - case 'C': - case 'c': - k = 12; - break; - case 'D': - case 'd': - k = 13; - break; - case 'E': - case 'e': - k = 14; - break; - case 'F': - case 'f': - k = 15; - break; - default: - throw new ParseException("Illegal hex escape: " + ch + ", line:" + line + ", column: " + column); - } - x = (x << 4) + k; - } - return x; - } - /** Remove first and last characters (e.g. 
' or "") from a string */ protected static String stripQuotes(String s) { return s.substring(1, s.length() - 1); @@ -288,73 +175,6 @@ protected String strRDFLiteral(String data, String lang, String dt) { return ret + "^^"; } - protected static String unescapePName(String s, int line, int column) throws ParseException { - char escape = '\\'; - int idx = s.indexOf(escape); - - if (idx == -1) - return s; - - int len = s.length(); - StringBuilder sb = new StringBuilder(); - - for (int i = 0; i < len; i++) { - // Copied form unescape abobve - share! - char ch = s.charAt(i); - // Keep line and column numbers. - switch (ch) { - case '\n': - case '\r': - line++; - column = 1; - break; - default: - column++; - break; - } - - if (ch != escape) { - sb.append(ch); - continue; - } - - // Escape - if (i >= s.length() - 1) - throw new ParseException("Illegal escape at end of string, line:" + line + ", column: " + column); - char ch2 = s.charAt(i + 1); - column = column + 1; - i = i + 1; - - switch (ch2) { - case '~': - case '.': - case '-': - case '!': - case '$': - case '&': - case '\'': - case '(': - case ')': - case '*': - case '+': - case ',': - case ';': - case '=': - case ':': - case '/': - case '?': - case '#': - case '@': - case '%': - sb.append(ch2); - break; - default: - throw new ParseException( - "Illegal prefix name escape: " + ch2 + ", line:" + line + ", column: " + column); - } - } - return sb.toString(); - } public List getRules() { return rules; @@ -364,8 +184,4 @@ public List getFacts() { return facts; } - public List getQueries() { - return queries; - } - } From a2a4ceda321af8de42e37cd2f5898f3852b7224d Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 16 Aug 2019 11:36:55 +0200 Subject: [PATCH 0088/1003] remove getQueries call --- .../org/semanticweb/vlog4j/examples/DoidExample.java | 10 +++++----- .../vlog4j/examples/SimpleExampleJavaCC.java | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index acccb3b38..54883e16a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -112,11 +112,11 @@ public static void main(final String[] args) System.out.println("Number of results in queries:"); QueryResultIterator answers; - for (PositiveLiteral l : ruleParser.getQueries()) { - answers = reasoner.answerQuery(l, true); - System.out.print(l.toString()); - System.out.println(": " + ExamplesUtils.iteratorSize(answers)); - } +// for (PositiveLiteral l : ruleParser.getQueries()) { +// answers = reasoner.answerQuery(l, true); +// System.out.print(l.toString()); +// System.out.println(": " + ExamplesUtils.iteratorSize(answers)); +// } System.out.println("Done."); } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java index b547756a6..cff2b9277 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java @@ -70,11 +70,11 @@ public static void main(final String[] args) System.out.println("Number of results in queries:"); QueryResultIterator answers; - for (PositiveLiteral l : rp.getQueries()) { - answers = 
reasoner.answerQuery(l, true); - System.out.print(l.toString()); - System.out.println(": " + ExamplesUtils.iteratorSize(answers)); - } +// for (PositiveLiteral l : rp.getQueries()) { +// answers = reasoner.answerQuery(l, true); +// System.out.print(l.toString()); +// System.out.println(": " + ExamplesUtils.iteratorSize(answers)); +// } System.out.println("Done."); } } From 24b95130babde53956e7eb91c8f00c8c5b42b75d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 17 Aug 2019 14:29:34 +0200 Subject: [PATCH 0089/1003] Partial rewrite of simple example --- .../vlog4j/examples/SimpleExampleJavaCC.java | 81 -------------- .../examples/SimpleReasoningExample.java | 104 ++++++++++++++++++ 2 files changed, 104 insertions(+), 81 deletions(-) delete mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java deleted file mode 100644 index b547756a6..000000000 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleExampleJavaCC.java +++ /dev/null @@ -1,81 +0,0 @@ -package org.semanticweb.vlog4j.examples; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; - -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; -import org.semanticweb.vlog4j.syntax.parser.ParsingException; -import org.semanticweb.vlog4j.syntax.parser.RuleParser; - -public class SimpleExampleJavaCC { - public static void main(final String[] args) - throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { - - ExamplesUtils.configureLogging(); - - try (final Reasoner reasoner = Reasoner.getInstance()) { - - String rules = ""; - rules += "@base . \n"; - rules += "
    () . \n"; - rules += "(?x) :-
    (?x) . \n"; - rules += "(?y) . \n"; - rules += "(?x,!y) :- (?x) . \n"; - rules += "(?x,?y) . \n"; - - RuleParser rp = new RuleParser(); - try { - rp.parse(rules); - } catch (ParsingException e) { - System.out.println("Failed to parse rules: " + e.getMessage()); - return; - } - - reasoner.addFacts(rp.getFacts()); - reasoner.addRules(rp.getRules()); - - System.out.println("Rules configured:\n--"); - reasoner.getRules().forEach(System.out::println); - System.out.println("--"); - reasoner.load(); - - System.out.println("Loading completed."); - System.out.println("Starting reasoning (including SPARQL query answering) ..."); - reasoner.reason(); - System.out.println("... reasoning completed.\n--"); - - System.out.println("Number of results in queries:"); - QueryResultIterator answers; - for (PositiveLiteral l : rp.getQueries()) { - answers = reasoner.answerQuery(l, true); - System.out.print(l.toString()); - System.out.println(": " + ExamplesUtils.iteratorSize(answers)); - } - System.out.println("Done."); - } - } -} diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java new file mode 100644 index 000000000..6936fa4e3 --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -0,0 +1,104 @@ +package org.semanticweb.vlog4j.examples; + +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.exceptions.VLog4jException; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.syntax.parser.ParsingException; +import org.semanticweb.vlog4j.syntax.parser.RuleParser; + +/** + * This example demonstrates the basic usage of VLog4j for rule reasoning. We + * are using a fixed set of rules and facts defined in Java without any external + * sources, and we query for some of the results. + * + * @author Markus Kroetzsch + * + */ +public class SimpleReasoningExample { + + public static void main(final String[] args) throws IOException { + + ExamplesUtils.configureLogging(); // use simple logger for the example + + // Define some facts and rules in VLog's basic syntax: + String rules = "% --- Some facts --- \n" // + + "location(germany,europe). " // + + "location(uk,europe). " // + + "location(saxony,germany). " // + + "location(dresden,saxony). " // + + "city(dresden). " // + + "country(germany). country(uk). " // + + "university(tudresden, germany) . university(uoxford, uk) . " // + + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") ." // + + "zipLocation(\"01069\", dresden) ." 
// + + "% --- Standard recursion: locations are transitive --- \n" // + + "locatedIn(?X,?Y) :- location(?X,?Y) . " // + + "locatedIn(?X,?Z) :- location(?X,?Y), locatedIn(?Y,?Z) . " // + + "% --- Build address facts using the city constant --- \n" // + + "address(?Uni, ?Street, ?ZIP, ?City) :- address(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP,?City) ." + + "% --- Value invention: universities have some address --- \n" // + + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) ." + + "% --- Negation: organisations in Europe but not in Germany --- \n" // + + "inEuropeOutsideGermany(?Org) :- address(?Org, ?S, ?Z, ?City), locatedIn(?City, europe), ~locatedIn(?City, germany) ."; + + RuleParser ruleParser = new RuleParser(); + try { + ruleParser.parse(rules); + } catch (ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } + + try (final Reasoner reasoner = Reasoner.getInstance()) { + + reasoner.addFacts(ruleParser.getFacts()); + reasoner.addRules(ruleParser.getRules()); + + System.out.println("Rules configured:\n--"); + reasoner.getRules().forEach(System.out::println); + System.out.println("--"); + + reasoner.load(); + + System.out.println("Loading completed."); + System.out.println("Starting reasoning ..."); + reasoner.reason(); + System.out.println("... reasoning completed.\n--"); + + System.out.println("Number of results in queries:"); + QueryResultIterator answers; + // TODO fix query +// for (PositiveLiteral l : ruleParser.getQueries()) { +// answers = reasoner.answerQuery(l, true); +// System.out.print(l.toString()); +// System.out.println(": " + ExamplesUtils.iteratorSize(answers)); +// } + System.out.println("Done."); + } catch (VLog4jException e) { + System.out.println("Error: " + e.getMessage()); + } + } +} From a2c1d5336b0b5c4dfdb89482ada3d03f1f9630f2 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 17 Aug 2019 14:30:45 +0200 Subject: [PATCH 0090/1003] some cleanup --- .../org/semanticweb/vlog4j/examples/DoidExample.java | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index acccb3b38..4d7f3e9ef 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -31,9 +31,7 @@ import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -52,9 +50,7 @@ */ public class DoidExample { - public static void main(final String[] args) - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - + public static void main(final String[] args) throws IOException { 
ExamplesUtils.configureLogging(); final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); @@ -119,6 +115,8 @@ public static void main(final String[] args) } System.out.println("Done."); + } catch (VLog4jException e) { + System.out.println(e.getMessage()); } } From 34754eb62ed8f1f1e54decc3317ef30622869e24 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 10:23:46 +0200 Subject: [PATCH 0091/1003] Minimal example rules that cause vlog error --- .../examples/SimpleReasoningExample.java | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 36030d01b..339c520f8 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -46,24 +46,25 @@ public static void main(final String[] args) throws IOException { // Define some facts and rules in VLog's basic syntax: String rules = "% --- Some facts --- \n" // + "location(germany,europe). " // - + "location(uk,europe). " // +// + "location(uk,europe). " // + "location(saxony,germany). " // - + "location(dresden,saxony). " // - + "city(dresden). " // - + "country(germany). country(uk). " // +// + "location(dresden,saxony). " // +// + "city(dresden). " // +// + "country(germany). country(uk). " // + "university(tudresden, germany) . university(uoxford, uk) . " // - + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") ." // +// + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") ." // + "zipLocation(\"01069\", dresden) ." // - + "% --- Standard recursion: locations are transitive --- \n" // +// + "% --- Standard recursion: locations are transitive --- \n" // + "locatedIn(?X,?Y) :- location(?X,?Y) . " // - + "locatedIn(?X,?Z) :- location(?X,?Y), locatedIn(?Y,?Z) . " // - + "% --- Build address facts using the city constant --- \n" // - + "address(?Uni, ?Street, ?ZIP, ?City) :- address(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP,?City) ." - + "% --- Value invention: universities have some address --- \n" // +// + "locatedIn(?X,?Z) :- location(?X,?Y), locatedIn(?Y,?Z) . " // +// + "% --- Build address facts using the city constant --- \n" // +// + "address(?Uni, ?Street, ?ZIP, ?City) :- address(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP,?City) ." +// + "% --- Value invention: universities have some address --- \n" // + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) ." - + "% --- Negation: organisations in Europe but not in Germany --- \n" // - + "inEuropeOutsideGermany(?Org) :- address(?Org, ?S, ?Z, ?City), locatedIn(?City, europe), ~locatedIn(?City, germany) ."; - +// + "% --- Negation: organisations in Europe but not in Germany --- \n" // + + "inEuropeOutsideGermany(?Org) :- address(?Org, ?S, ?Z, ?City), locatedIn(?City, europe), ~locatedIn(?City, germany) ." 
+ + ""; + RuleParser ruleParser = new RuleParser(); try { ruleParser.parse(rules); From 785d3587f07bb085aebc5963daec4fb5a3e55047 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 11:08:43 +0200 Subject: [PATCH 0092/1003] Cleaner parser code; more tests --- .../implementation/javacc/JavaCCRuleParser.jj | 61 ++++++++----------- .../vlog4j/syntax/parser/RuleParser.java | 5 +- .../vlog4j/syntax/parser/RuleParserBase.java | 44 ++++++++++++- .../vlog4j/syntax/parser/RuleParserTest.java | 58 +++++++++++++++--- 4 files changed, 120 insertions(+), 48 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index 99fffc906..d311bb999 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -15,8 +15,6 @@ package org.semanticweb.vlog4j.parser.implementation.javacc; import java.util.List; import java.util.ArrayList; -import java.util.Set; -import java.util.HashSet; import org.semanticweb.vlog4j.syntax.parser.RuleParserBase; import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException; @@ -33,13 +31,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; public class JavaCCRuleParser extends RuleParserBase { - // we use an int to specify where a variable comes from - // 0 if the variable does not come from a rule - // 1 if the variable comes from the head of a rule - // 2 if the variable comes from the body of a rule - Set bodyVars; - Set headExiVars; - Set headUniVars; } PARSER_END(JavaCCRuleParser) @@ -83,10 +74,11 @@ void statement() throws PrefixDeclarationException: { Rule r; PositiveLiteral l; + resetVariableSets(); } { LOOKAHEAD(rule()) r = rule() { rules.add(r);} -| l = positiveLiteral(0) < DOT > //not from a rule +| l = positiveLiteral(FormulaContext.HEAD) < DOT > //not from a rule { if (l.getVariables().isEmpty()) { facts.add(l); @@ -102,14 +94,9 @@ Rule rule() throws PrefixDeclarationException: Rule rule; List < PositiveLiteral > head; List < Literal > body; - bodyVars = new HashSet(); - headExiVars = new HashSet(); - headUniVars = new HashSet(); } { - // 1 rule's head - // 2 rule's body - head = listOfPositiveLiterals(1) < ARROW > body = listOfLiterals(2) < DOT > + head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > { rule = Expressions.makeRule(Expressions.makePositiveConjunction(head), Expressions.makeConjunction(body)); @@ -129,66 +116,66 @@ Rule rule() throws PrefixDeclarationException: } } -List < PositiveLiteral > listOfPositiveLiterals(int itComesFrom) throws PrefixDeclarationException: +List < PositiveLiteral > listOfPositiveLiterals(FormulaContext context) throws PrefixDeclarationException: { PositiveLiteral l; List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); } { - l = positiveLiteral(itComesFrom) { list.add(l); } - ( < COMMA > l = positiveLiteral(itComesFrom) { list.add(l); } )* + l = positiveLiteral(context) { list.add(l); } + ( < COMMA > l = positiveLiteral(context) { list.add(l); } )* { return list; } } -List < Literal > listOfLiterals(int itComesFrom) throws PrefixDeclarationException: +List < Literal > listOfLiterals(FormulaContext context) throws PrefixDeclarationException: { Literal l; List < Literal > list = new ArrayList 
< Literal > (); } { - l = literal(itComesFrom) { list.add(l); } - ( < COMMA > l = literal(itComesFrom) { list.add(l); } )* + l = literal(context) { list.add(l); } + ( < COMMA > l = literal(context) { list.add(l); } )* { return list; } } -Literal literal(int itComesFrom) throws PrefixDeclarationException: +Literal literal(FormulaContext context) throws PrefixDeclarationException: { Literal l = null; } { - l = positiveLiteral(itComesFrom) { return l; } -| l = negativeLiteral(itComesFrom) { return l; } + l = positiveLiteral(context) { return l; } +| l = negativeLiteral(context) { return l; } } -PositiveLiteral positiveLiteral(int itComesFrom) throws PrefixDeclarationException: +PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclarationException: { Token t; List < Term > terms; String predicateName; } { - predicateName = predicateName() < LPAREN > terms = listOfTerms(itComesFrom) < RPAREN > + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { return Expressions.makePositiveLiteral(predicateName, terms); } } -NegativeLiteral negativeLiteral(int itComesFrom) throws PrefixDeclarationException: +NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException: { List < Term > terms; String predicateName; } { - < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(itComesFrom) < RPAREN > + < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { return Expressions.makeNegativeLiteral(predicateName, terms); } } -List < Term > listOfTerms(int itComesFrom) throws PrefixDeclarationException: +List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationException: { Term t; List < Term > list = new ArrayList < Term > (); } { - t = term(itComesFrom) { list.add(t); } - ( < COMMA > t = term(itComesFrom) { list.add(t); } )* + t = term(context) { list.add(t); } + ( < COMMA > t = term(context) { list.add(t); } )* { return list; } } @@ -206,7 +193,7 @@ String predicateName() throws PrefixDeclarationException: // 0 if the variable does not come from a rule // 1 if the variable comes from the head of a rule // 2 if the variable comes from the body of a rule -Term term(int itComesFrom) throws PrefixDeclarationException: +Term term(FormulaContext context) throws PrefixDeclarationException: { Token t; String s; @@ -220,18 +207,18 @@ Term term(int itComesFrom) throws PrefixDeclarationException: | t = < UNIVAR > { s = t.image.substring(1); - if (itComesFrom == 1) + if (context == FormulaContext.HEAD) headUniVars.add(s); - if (itComesFrom == 2) + else if (context == FormulaContext.BODY) bodyVars.add(s); return Expressions.makeVariable(t.image.substring(1)); } | t = < EXIVAR > { s = t.image.substring(1); - if (itComesFrom == 1) + if (context == FormulaContext.HEAD) headExiVars.add(s); - if (itComesFrom == 2) + if (context == FormulaContext.BODY) throw new ParseException("Existentialy quantified variables can not appear in the body. 
Line: " + t.beginLine + ", Column: "+ t.beginColumn); return Expressions.makeVariable(t.image.substring(1)); } diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java index bc7082bea..da62ae017 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.parser.implementation.javacc.JavaCCRuleParser; import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; +import org.semanticweb.vlog4j.parser.implementation.javacc.TokenMgrError; import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException; /** @@ -46,7 +47,7 @@ public void parse(InputStream stream, String encoding) throws ParsingException { parser = new JavaCCRuleParser(stream, encoding); doParse(); } - + public void parse(InputStream stream) throws ParsingException { parse(stream, "UTF-8"); } @@ -59,7 +60,7 @@ public void parse(String input) throws ParsingException { void doParse() throws ParsingException { try { parser.parse(); - } catch (ParseException | PrefixDeclarationException e) { + } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { throw new ParsingException(e.getMessage(), e); } } diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java index 378020c46..b9dfed5ee 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java @@ -22,6 +22,7 @@ import java.util.List; import java.util.ArrayList; +import java.util.HashSet; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -47,14 +48,47 @@ */ public class RuleParserBase { final protected PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); + final protected List rules = new ArrayList<>(); final protected List facts = new ArrayList<>(); final protected List queries = new ArrayList<>(); + /** + * "Local" variable to remember (universal) body variables during parsing. + */ + final protected HashSet bodyVars = new HashSet(); + /** + * "Local" variable to remember existential head variables during parsing. + */ + final protected HashSet headExiVars = new HashSet();; + /** + * "Local" variable to remember universal head variables during parsing. + */ + final protected HashSet headUniVars = new HashSet();; + + /** + * Defines the context for parsing sub-formulas. + * + * @author Markus Kroetzsch + * + */ + protected enum FormulaContext { + /** + * Formula is to be interpreted in the context of a rule head (positive + * occurrence). + */ + HEAD, + /** + * Formula is to be interpreted in the context of a rule body (negative + * occurrence). 
+ */ + BODY + } + protected Constant createBooleanLiteral(String lexicalForm) { // lexicalForm is one of ['true' or 'false'] // we remove the quotes and add data type - lexicalForm = lexicalForm.substring(1,lexicalForm.length()-1); + lexicalForm = lexicalForm.substring(1, lexicalForm.length() - 1); return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_BOOLEAN + ">"); } @@ -175,6 +209,14 @@ protected String strRDFLiteral(String data, String lang, String dt) { return ret + "^^"; } + /** + * Reset the local set variables used when parsing a rule. + */ + protected void resetVariableSets() { + this.bodyVars.clear(); + this.headExiVars.clear(); + this.headUniVars.clear(); + } public List getRules() { return rules; diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 681213a93..0851031de 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -117,6 +117,20 @@ public void testMissingPrefix() throws ParsingException { ruleParser.parse(input); } + @Test(expected = ParsingException.class) + public void testNoUniversalLiterals() throws ParsingException { + String input = "p(?X) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void testNoExistentialLiterals() throws ParsingException { + String input = "p(!X) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + @Test public void testSimpleRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . 
"; @@ -163,6 +177,13 @@ public void testNoConflictingQuantificationVariables() throws ParsingException { ruleParser.parse(input); } + @Test(expected = ParsingException.class) + public void testNoBodyExistential() throws ParsingException { + String input = "p(?X) :- q(?X,!Y) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + @Test(expected = ParsingException.class) public void testNoDollarVariables() throws ParsingException { String input = "p($X) :- q($X) ."; @@ -175,7 +196,8 @@ public void testTrueBooleanLiteral() throws ParsingException { String input = "p('true') ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("true^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); + PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("true^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); } @@ -184,7 +206,8 @@ public void testFalseBooleanLiteral() throws ParsingException { String input = "p('false') ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral falseLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("false^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); + PositiveLiteral falseLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("false^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); assertEquals(Arrays.asList(falseLiteral), ruleParser.getFacts()); } @@ -193,7 +216,8 @@ public void testIntegerLiteral() throws ParsingException { String input = "p(42) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("42^^<" + PrefixDeclarations.XSD_INTEGER + ">")); + PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("42^^<" + PrefixDeclarations.XSD_INTEGER + ">")); assertEquals(Arrays.asList(integerLiteral), ruleParser.getFacts()); } @@ -202,7 +226,8 @@ public void testAbbreviatedIntegerLiteral() throws ParsingException { String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . 
" + "p(\"42\"^^xsd:integer) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + ">")); + PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + ">")); assertEquals(Arrays.asList(integerLiteral), ruleParser.getFacts()); } @@ -211,7 +236,8 @@ public void testFullIntegerLiteral() throws ParsingException { String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> ) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + ">")); + PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + ">")); assertEquals(Arrays.asList(integerLiteral), ruleParser.getFacts()); } @@ -220,7 +246,8 @@ public void testDecimalLiteral() throws ParsingException { String input = "p(-5.0) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("-5.0^^<" + PrefixDeclarations.XSD_DECIMAL + ">")); + PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("-5.0^^<" + PrefixDeclarations.XSD_DECIMAL + ">")); assertEquals(Arrays.asList(decimalLiteral), ruleParser.getFacts()); } @@ -229,7 +256,8 @@ public void testDoubleLiteral() throws ParsingException { String input = "p(4.2E9) ."; RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); - PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("4.2E9^^<" + PrefixDeclarations.XSD_DOUBLE + ">")); + PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeConstant("4.2E9^^<" + PrefixDeclarations.XSD_DOUBLE + ">")); assertEquals(Arrays.asList(doubleLiteral), ruleParser.getFacts()); } @@ -240,6 +268,13 @@ public void testStringLiteral() throws ParsingException { ruleParser.parse(input); assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); } + + @Test(expected = ParsingException.class) + public void testIncompleteStringLiteral() throws ParsingException { + String input = "p(\"abc) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } @Test public void testStringLiteralEscapes() throws ParsingException { @@ -271,6 +306,13 @@ public void testStringLiteralMultiLine() throws ParsingException { Expressions.makeConstant("\"line 1\n\nline 2\nline 3\"^^<" + PrefixDeclarations.XSD_STRING + ">")); assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } + + @Test(expected = ParsingException.class) + public void testIncompleteStringLiteralMultiLine() throws ParsingException { + String input = "p('''abc\ndef'') ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } @Test public void testFullLiteral() throws ParsingException { @@ -282,7 +324,7 @@ public void testFullLiteral() throws ParsingException { @Test public void testUnicodeLiteral() throws ParsingException { - String input = "p(\"\\u0061\\u0062\\u0063\") ." 
; //"abc" + String input = "p(\"\\u0061\\u0062\\u0063\") ."; // "abc" RuleParser ruleParser = new RuleParser(); ruleParser.parse(input); assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); From cfd829064839c52e56ecdf0eb352f0f1a94d5a55 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 12:01:25 +0200 Subject: [PATCH 0093/1003] Support parsing isolated literals --- .../implementation/javacc/JavaCCRuleParser.jj | 2 +- .../vlog4j/syntax/parser/RuleParser.java | 23 +++++++++++++ .../vlog4j/syntax/parser/RuleParserTest.java | 34 +++++++++++++++++-- 3 files changed, 56 insertions(+), 3 deletions(-) diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj index d311bb999..2b252a3e9 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj @@ -83,7 +83,7 @@ void statement() throws PrefixDeclarationException: if (l.getVariables().isEmpty()) { facts.add(l); } else { - throw new ParseException("Queries are not supported: " + l.toString()); + throw new ParseException("Facts with variables are not allowed: " + l.toString()); } } diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java index da62ae017..fabede5d0 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java +++ b/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java @@ -22,14 +22,17 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; +import java.time.format.FormatStyle; import java.util.List; +import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.parser.implementation.javacc.JavaCCRuleParser; import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; import org.semanticweb.vlog4j.parser.implementation.javacc.TokenMgrError; import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException; +import org.semanticweb.vlog4j.syntax.parser.RuleParserBase.FormulaContext; /** * Class to access VLog parsing functionality. 
@@ -57,6 +60,26 @@ public void parse(String input) throws ParsingException { parse(inputStream, "UTF-8"); } + public Literal parseLiteral(String input) throws ParsingException { + InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + JavaCCRuleParser localParser = new JavaCCRuleParser(inputStream, "UTF-8"); + try { + return localParser.literal(FormulaContext.HEAD); + } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { + throw new ParsingException(e.getMessage(), e); + } + } + + public PositiveLiteral parsePositiveLiteral(String input) throws ParsingException { + InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + JavaCCRuleParser localParser = new JavaCCRuleParser(inputStream, "UTF-8"); + try { + return localParser.positiveLiteral(FormulaContext.HEAD); + } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { + throw new ParsingException(e.getMessage(), e); + } + } + void doParse() throws ParsingException { try { parser.parse(); diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 0851031de..52d8ac550 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -268,7 +268,7 @@ public void testStringLiteral() throws ParsingException { ruleParser.parse(input); assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); } - + @Test(expected = ParsingException.class) public void testIncompleteStringLiteral() throws ParsingException { String input = "p(\"abc) ."; @@ -306,7 +306,7 @@ public void testStringLiteralMultiLine() throws ParsingException { Expressions.makeConstant("\"line 1\n\nline 2\nline 3\"^^<" + PrefixDeclarations.XSD_STRING + ">")); assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } - + @Test(expected = ParsingException.class) public void testIncompleteStringLiteralMultiLine() throws ParsingException { String input = "p('''abc\ndef'') ."; @@ -364,4 +364,34 @@ public void testLineComments() throws ParsingException { assertEquals(Arrays.asList(fact), ruleParser.getFacts()); } + @Test + public void testPositiveLiteral() throws ParsingException { + String input = "(?X,)"; + RuleParser ruleParser = new RuleParser(); + Literal literal = ruleParser.parsePositiveLiteral(input); + assertEquals(atom1, literal); + } + + @Test(expected = ParsingException.class) + public void testPositiveLiteralError() throws ParsingException { + String input = "~ (?X,)"; + RuleParser ruleParser = new RuleParser(); + ruleParser.parsePositiveLiteral(input); + } + + @Test + public void testLiteral() throws ParsingException { + String input = "~ (?X,)"; + RuleParser ruleParser = new RuleParser(); + Literal literal = ruleParser.parseLiteral(input); + assertEquals(negAtom1, literal); + } + + @Test(expected = ParsingException.class) + public void tesLiteralError() throws ParsingException { + String input = "(?X, Date: Mon, 19 Aug 2019 12:01:58 +0200 Subject: [PATCH 0094/1003] Fix example to use new features --- vlog4j-examples/src/main/data/input/doid.rls | 4 -- .../vlog4j/examples/DoidExample.java | 43 +++++++++++-------- 2 files changed, 26 insertions(+), 21 deletions(-) diff --git a/vlog4j-examples/src/main/data/input/doid.rls b/vlog4j-examples/src/main/data/input/doid.rls index 2997f31d4..8a178a9f8 100644 --- a/vlog4j-examples/src/main/data/input/doid.rls +++ 
b/vlog4j-examples/src/main/data/input/doid.rls @@ -28,7 +28,3 @@ humansWhoDiedOfCancer(?X) :- deathCause(?X, ?Y), diseaseId(?Y, ?Z), cancerDiseas humansWhoDiedOfNoncancer(?X) :- deathCause(?X, ?Y), diseaseId(?Y, ?Z), ~cancerDisease(?Z) . humansWhoDiedOfNoncancer(?X) :- deathCause(?X, ?Y), ~hasDoid(?Y) . -% Queries to be used in example application: -humansWhoDiedOfCancer(?X) . -humansWhoDiedOfNoncancer(?X) . - diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index f81e0971b..a9f7cb9cf 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -24,6 +24,8 @@ import java.io.FileInputStream; import java.io.IOException; import java.net.URL; +import java.util.Arrays; +import java.util.List; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -87,6 +89,7 @@ public static void main(final String[] args) throws IOException { final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); reasoner.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); + /* Configure rules */ RuleParser ruleParser = new RuleParser(); try { ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/doid.rls")); @@ -94,32 +97,38 @@ public static void main(final String[] args) throws IOException { System.out.println("Failed to parse rules: " + e.getMessage()); return; } - reasoner.addRules(ruleParser.getRules()); - - System.out.println("Rules configured:\n--"); + System.out.println("Rules used in this example:"); reasoner.getRules().forEach(System.out::println); - System.out.println("--"); + System.out.println(""); + + /* Initialise reasoner and compute inferences */ + System.out.print("Initialising rules and data sources ... "); reasoner.load(); - System.out.println("Loading completed."); - System.out.println("Starting reasoning (including SPARQL query answering) ..."); + System.out.println("completed."); + + System.out.print("Reasoning (including SPARQL query answering) ... "); reasoner.reason(); - System.out.println("... 
reasoning completed.\n--"); + System.out.println("completed."); - System.out.println("Number of results in queries:"); + /* Execute some queries */ + List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); QueryResultIterator answers; - // TODO get queries and answer them -// for (PositiveLiteral l : ruleParser.getQueries()) { -// answers = reasoner.answerQuery(l, true); -// System.out.print(l.toString()); -// System.out.println(": " + ExamplesUtils.iteratorSize(answers)); -// } - System.out.println("Done."); + System.out.println("\nNumber of inferred tuples for selected query atoms:"); + for (String queryString : queries) { + try { + PositiveLiteral query = ruleParser.parsePositiveLiteral(queryString); + answers = reasoner.answerQuery(query, true); + System.out.println(" " + query.toString() + ": " + ExamplesUtils.iteratorSize(answers)); + } catch (ParsingException e) { + System.out.println("Failed to parse query: " + e.getMessage()); + } + } + System.out.println("\nDone."); } catch (VLog4jException e) { - System.out.println(e.getMessage()); + System.out.println("The reasoner encountered a problem:" + e.getMessage()); } - } } From fbdc52c9f4b4ce4b2501c0d29a2b66daf4685205 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 12:41:20 +0200 Subject: [PATCH 0095/1003] relocate some modules, packages, and classes --- pom.xml | 2 +- .../api}/PrefixDeclarationException.java | 2 +- .../core/model/api}/PrefixDeclarations.java | 2 +- vlog4j-examples/pom.xml | 2 +- .../vlog4j/examples/DoidExample.java | 4 +- .../vlog4j/examples/ExamplesUtils.java | 48 ++++++++++++------- .../examples/SimpleReasoningExample.java | 19 +++----- .../examples/core/AddDataFromCsvFile.java | 4 +- .../examples/core/AddDataFromRdfFile.java | 4 +- .../examples/rdf/AddDataFromRdfModel.java | 4 +- {vlog4j-syntax => vlog4j-parser}/LICENSE.txt | 0 {vlog4j-syntax => vlog4j-parser}/pom.xml | 4 +- .../parser/LocalPrefixDeclarations.java | 6 +-- .../vlog4j}/parser/ParsingException.java | 2 +- .../vlog4j}/parser/RuleParser.java | 13 +++-- .../vlog4j}/parser/RuleParserBase.java | 7 ++- .../vlog4j/parser}/javacc/.gitignore | 0 .../vlog4j/parser}/javacc/JavaCCRuleParser.jj | 6 +-- .../vlog4j/syntax/parser/RuleParserTest.java | 4 +- 19 files changed, 69 insertions(+), 64 deletions(-) rename {vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common => vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api}/PrefixDeclarationException.java (95%) rename {vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common => vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api}/PrefixDeclarations.java (97%) rename {vlog4j-syntax => vlog4j-parser}/LICENSE.txt (100%) rename {vlog4j-syntax => vlog4j-parser}/pom.xml (97%) rename {vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax => vlog4j-parser/src/main/java/org/semanticweb/vlog4j}/parser/LocalPrefixDeclarations.java (93%) rename {vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax => vlog4j-parser/src/main/java/org/semanticweb/vlog4j}/parser/ParsingException.java (96%) rename {vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax => vlog4j-parser/src/main/java/org/semanticweb/vlog4j}/parser/RuleParser.java (85%) rename {vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax => vlog4j-parser/src/main/java/org/semanticweb/vlog4j}/parser/RuleParserBase.java (97%) rename {vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation => 
vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser}/javacc/.gitignore (100%) rename {vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation => vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser}/javacc/JavaCCRuleParser.jj (94%) rename {vlog4j-syntax => vlog4j-parser}/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java (98%) diff --git a/pom.xml b/pom.xml index e84e1c78c..13d612650 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ vlog4j-examples vlog4j-owlapi vlog4j-graal - vlog4j-syntax + vlog4j-parser diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationException.java similarity index 95% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarationException.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationException.java index 76b8a7d21..0c49c493e 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarationException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.common; +package org.semanticweb.vlog4j.core.model.api; /*- * #%L diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java similarity index 97% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java index 277e93424..70e7fb631 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/common/PrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.common; +package org.semanticweb.vlog4j.core.model.api; /*- * #%L diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index 615bd2636..a36d00215 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -39,7 +39,7 @@ ${project.groupId} - vlog4j-syntax + vlog4j-parser ${project.version} diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index a9f7cb9cf..412d32906 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -37,8 +37,8 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.vlog4j.syntax.parser.ParsingException; -import org.semanticweb.vlog4j.syntax.parser.RuleParser; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; /** * This example reasons about human diseases, based on information from the diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index f8151b408..d1a07b4fe 100644 --- 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -37,6 +37,8 @@ import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; public final class ExamplesUtils { @@ -82,12 +84,9 @@ public static void configureLogging() { * Prints out the answers given by {@code reasoner} to the query * ({@code queryAtom}). * - * @param queryAtom - * query to be answered - * @param reasoner - * reasoner to query on - * @throws ReasonerStateException - * in case the reasoner has not yet been loaded. + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + * @throws ReasonerStateException in case the reasoner has not yet been loaded. */ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) throws ReasonerStateException { @@ -98,13 +97,31 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R } } + /** + * Prints out the answers given by {@code reasoner} to the query + * ({@code queryAtom}). + * + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + * @throws ReasonerStateException in case the reasoner has not yet been loaded. + */ + public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) + throws ReasonerStateException { + RuleParser ruleParser = new RuleParser(); + try { + PositiveLiteral query = ruleParser.parsePositiveLiteral(queryString); + printOutQueryAnswers(query, reasoner); + } catch (ParsingException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + /** * Returns the size of an iterator. * * @FIXME This is an inefficient way of counting results. 
It should be done at a * lower level instead - * @param Iterator - * to iterate over + * @param Iterator to iterate over * @return number of elements in iterator */ public static int iteratorSize(Iterator iterator) { @@ -117,10 +134,8 @@ public static int iteratorSize(Iterator iterator) { /** * Creates an Atom with @numberOfVariables distinct variables * - * @param predicateName - * for the new predicate - * @param arity - * number of variables + * @param predicateName for the new predicate + * @param arity number of variables */ private static PositiveLiteral makeQueryAtom(String predicateName, int arity) { final List vars = new ArrayList<>(); @@ -132,12 +147,9 @@ private static PositiveLiteral makeQueryAtom(String predicateName, int arity) { /** * Exports the extension of the Atom with name @predicateName * - * @param reasoner - * reasoner to query on - * @param atomName - * atom's name - * @param arity - * atom's arity + * @param reasoner reasoner to query on + * @param atomName atom's name + * @param arity atom's arity */ public static void exportQueryAnswersToCSV(Reasoner reasoner, String atomName, int arity) throws ReasonerStateException, IOException { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 339c520f8..0fe631915 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -22,12 +22,10 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.VLog4jException; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; -import org.semanticweb.vlog4j.syntax.parser.ParsingException; -import org.semanticweb.vlog4j.syntax.parser.RuleParser; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; /** * This example demonstrates the basic usage of VLog4j for rule reasoning. We @@ -64,7 +62,7 @@ public static void main(final String[] args) throws IOException { // + "% --- Negation: organisations in Europe but not in Germany --- \n" // + "inEuropeOutsideGermany(?Org) :- address(?Org, ?S, ?Z, ?City), locatedIn(?City, europe), ~locatedIn(?City, germany) ." + ""; - + RuleParser ruleParser = new RuleParser(); try { ruleParser.parse(rules); @@ -89,14 +87,9 @@ public static void main(final String[] args) throws IOException { reasoner.reason(); System.out.println("... 
reasoning completed.\n--"); - System.out.println("Number of results in queries:"); - QueryResultIterator answers; - // TODO get queries and answer them -// for (PositiveLiteral l : ruleParser.getQueries()) { -// answers = reasoner.answerQuery(l, true); -// System.out.print(l.toString()); -// System.out.println(": " + ExamplesUtils.iteratorSize(answers)); -// } + /* Execute some queries */ + ExamplesUtils.printOutQueryAnswers("inEuropeOutsideGermany(?Org)", reasoner); + System.out.println("Done."); } catch (VLog4jException e) { System.out.println("Error: " + e.getMessage()); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 8cdeb89c6..c54d44e24 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -40,8 +40,8 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.syntax.parser.ParsingException; -import org.semanticweb.vlog4j.syntax.parser.RuleParser; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; /** * This example shows how facts can be imported from files in the CSV format. diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 3edec03f6..0c15fbc40 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -40,8 +40,8 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.syntax.parser.ParsingException; -import org.semanticweb.vlog4j.syntax.parser.RuleParser; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; /** * This example shows how facts can be imported from files in the RDF N-Triples diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index 75553b6c8..88ed798fb 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -49,8 +49,8 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.rdf.RdfModelConverter; -import org.semanticweb.vlog4j.syntax.parser.ParsingException; -import org.semanticweb.vlog4j.syntax.parser.RuleParser; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; /** * This example shows how vlog4j-rdf library's utility class diff --git a/vlog4j-syntax/LICENSE.txt b/vlog4j-parser/LICENSE.txt similarity index 100% rename from vlog4j-syntax/LICENSE.txt 
rename to vlog4j-parser/LICENSE.txt diff --git a/vlog4j-syntax/pom.xml b/vlog4j-parser/pom.xml similarity index 97% rename from vlog4j-syntax/pom.xml rename to vlog4j-parser/pom.xml index 3e93770a6..bfbf05a0a 100644 --- a/vlog4j-syntax/pom.xml +++ b/vlog4j-parser/pom.xml @@ -11,9 +11,9 @@ 0.4.0-SNAPSHOT - vlog4j-syntax + vlog4j-parser - VLog4j Syntax + VLog4j Parser http://maven.apache.org UTF-8 diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java similarity index 93% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java rename to vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java index 20488eabb..fdb27f093 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/LocalPrefixDeclarations.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L @@ -24,8 +24,8 @@ import java.util.HashMap; import java.util.Map; -import org.semanticweb.vlog4j.syntax.common.PrefixDeclarations; -import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; /** * Implementation of {@link PrefixDeclarations} that is used when parsing data diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/ParsingException.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java similarity index 96% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/ParsingException.java rename to vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java index 72d76d0fe..9261af3f9 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/ParsingException.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java similarity index 85% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java rename to vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index fabede5d0..32d2c8f40 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L @@ -22,17 +22,16 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; -import java.time.format.FormatStyle; import java.util.List; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.parser.implementation.javacc.JavaCCRuleParser; -import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; -import 
org.semanticweb.vlog4j.parser.implementation.javacc.TokenMgrError; -import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException; -import org.semanticweb.vlog4j.syntax.parser.RuleParserBase.FormulaContext; +import org.semanticweb.vlog4j.parser.RuleParserBase.FormulaContext; +import org.semanticweb.vlog4j.parser.javacc.JavaCCRuleParser; +import org.semanticweb.vlog4j.parser.javacc.ParseException; +import org.semanticweb.vlog4j.parser.javacc.TokenMgrError; /** * Class to access VLog parsing functionality. diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParserBase.java similarity index 97% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java rename to vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParserBase.java index b9dfed5ee..5e60357b9 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/syntax/parser/RuleParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParserBase.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L @@ -26,10 +26,9 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.parser.implementation.javacc.ParseException; -import org.semanticweb.vlog4j.syntax.common.PrefixDeclarations; - +import org.semanticweb.vlog4j.parser.javacc.ParseException; import org.semanticweb.vlog4j.core.model.implementation.Expressions; /** diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/.gitignore similarity index 100% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/.gitignore rename to vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/.gitignore diff --git a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj similarity index 94% rename from vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj rename to vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj index 2b252a3e9..55463146e 100644 --- a/vlog4j-syntax/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/JavaCCRuleParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj @@ -11,13 +11,13 @@ options } PARSER_BEGIN(JavaCCRuleParser) -package org.semanticweb.vlog4j.parser.implementation.javacc; +package org.semanticweb.vlog4j.parser.javacc; import java.util.List; import java.util.ArrayList; -import org.semanticweb.vlog4j.syntax.parser.RuleParserBase; -import org.semanticweb.vlog4j.syntax.common.PrefixDeclarationException; +import org.semanticweb.vlog4j.parser.RuleParserBase; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Literal; diff --git a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java similarity index 98% rename from vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 52d8ac550..5b15a3eb4 100644 --- a/vlog4j-syntax/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -29,10 +29,12 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.syntax.common.PrefixDeclarations; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; public class RuleParserTest { From 0b46eca9deaaec0d2c6b5a887560bb27b9e94f02 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 13:57:57 +0200 Subject: [PATCH 0096/1003] fix directory --- vlog4j-parser/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index bfbf05a0a..824b20e74 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -36,7 +36,7 @@ ruleparser - ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/implementation/javacc/ + ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/javacc/ javacc From e36d5e0f037d7e15186f60b0117baef070287a18 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 14:02:45 +0200 Subject: [PATCH 0097/1003] Move exceptions to new package --- .../exceptions/EdbIdbSeparationException.java | 2 +- .../exceptions/IncompatiblePredicateArityException.java | 2 +- .../api => exceptions}/PrefixDeclarationException.java | 2 +- .../{reasoner => }/exceptions/ReasonerStateException.java | 2 +- .../core/{reasoner => }/exceptions/VLog4jException.java | 2 +- .../vlog4j/core/model/api/PrefixDeclarations.java | 2 ++ .../java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java | 6 +++--- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 6 +++--- .../org/semanticweb/vlog4j/core/reasoner/LoggingTest.java | 6 +++--- .../vlog4j/core/reasoner/ReasonerTimeoutTest.java | 6 +++--- .../core/reasoner/implementation/AddDataSourceTest.java | 6 +++--- .../core/reasoner/implementation/AnswerQueryTest.java | 6 +++--- .../implementation/ExportQueryAnswersToCsvFileTest.java | 6 +++--- .../reasoner/implementation/FileDataSourceTestUtils.java | 6 +++--- .../implementation/GeneratedAnonymousIndividualsTest.java | 6 +++--- .../reasoner/implementation/LoadDataFromCsvFileTest.java | 6 +++--- .../reasoner/implementation/LoadDataFromMemoryTest.java | 6 +++--- .../reasoner/implementation/LoadDataFromRdfFileTest.java | 6 +++--- .../implementation/LoadDataFromSparqlQueryTest.java | 6 +++--- .../core/reasoner/implementation/ReasonerStateTest.java | 6 +++--- .../vlog4j/core/reasoner/implementation/ReasonerTest.java | 6 +++--- .../implementation/SparqlQueryResultDataSourceTest.java | 6 +++--- .../reasoner/implementation/StratifiedNegationTest.java | 6 +++--- .../java/org/semanticweb/vlog4j/examples/DoidExample.java | 2 +- .../java/org/semanticweb/vlog4j/examples/ExamplesUtils.java | 2 +- 
.../semanticweb/vlog4j/examples/SimpleReasoningExample.java | 2 +- .../vlog4j/examples/core/AddDataFromCsvFile.java | 6 +++--- .../vlog4j/examples/core/AddDataFromRdfFile.java | 6 +++--- .../vlog4j/examples/core/AddDataFromSparqlQueryResults.java | 6 +++--- .../vlog4j/examples/core/ConfigureReasonerLogging.java | 6 +++--- .../examples/core/SkolemVsRestrictedChaseTermination.java | 6 +++--- .../vlog4j/examples/graal/AddDataFromDlgpFile.java | 6 +++--- .../semanticweb/vlog4j/examples/graal/AddDataFromGraal.java | 6 +++--- .../semanticweb/vlog4j/examples/graal/DoidExampleGraal.java | 6 +++--- .../vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java | 6 +++--- .../vlog4j/examples/rdf/AddDataFromRdfModel.java | 6 +++--- .../semanticweb/vlog4j/parser/LocalPrefixDeclarations.java | 2 +- .../main/java/org/semanticweb/vlog4j/parser/RuleParser.java | 2 +- .../semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj | 2 +- .../org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java | 6 +++--- 40 files changed, 97 insertions(+), 95 deletions(-) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/{reasoner => }/exceptions/EdbIdbSeparationException.java (93%) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/{reasoner => }/exceptions/IncompatiblePredicateArityException.java (96%) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/{model/api => exceptions}/PrefixDeclarationException.java (94%) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/{reasoner => }/exceptions/ReasonerStateException.java (96%) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/{reasoner => }/exceptions/VLog4jException.java (94%) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/EdbIdbSeparationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/EdbIdbSeparationException.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/EdbIdbSeparationException.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/EdbIdbSeparationException.java index 5084a84e4..af51904c7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/EdbIdbSeparationException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/EdbIdbSeparationException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.exceptions; +package org.semanticweb.vlog4j.core.exceptions; import java.text.MessageFormat; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/IncompatiblePredicateArityException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/IncompatiblePredicateArityException.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java index cc61ae358..c072e49fa 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/IncompatiblePredicateArityException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.exceptions; +package org.semanticweb.vlog4j.core.exceptions; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationException.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationException.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java index 0c49c493e..96e05979c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.vlog4j.core.exceptions; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/ReasonerStateException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/ReasonerStateException.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java index eb7fc8135..5b20f275b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/ReasonerStateException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java @@ -18,7 +18,7 @@ * #L% */ -package org.semanticweb.vlog4j.core.reasoner.exceptions; +package org.semanticweb.vlog4j.core.exceptions; import java.text.MessageFormat; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/VLog4jException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/VLog4jException.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java index f7d0fca18..efcb1bf29 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/VLog4jException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.exceptions; +package org.semanticweb.vlog4j.core.exceptions; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java index 70e7fb631..09f4e46b7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; + /*- * #%L * vlog4j-syntax diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 64424b175..345ad73f0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -6,14 +6,14 @@ import org.eclipse.jdt.annotation.NonNull; import org.eclipse.jdt.annotation.Nullable; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import 
org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 5e159ce19..3263b158c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -13,6 +13,9 @@ import java.util.Set; import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -26,9 +29,6 @@ import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 31fccec7c..83d4d1a06 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -31,14 +31,14 @@ import java.io.IOException; import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; public class LoggingTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index fd982f3c6..7a3c297bf 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -36,13 +36,13 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.rules.Timeout; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; /** * Test case ensuring {@link Reasoner#setReasoningTimeout(Integer)} works as expected and terminates reasoning after the given {@link #timeout}. diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 9bf1a7816..41fda702f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -28,15 +28,15 @@ import java.util.Set; import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index 5b749138f..a28afec01 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -34,6 +34,9 @@ import org.junit.Assert; import org.junit.Test; import org.mockito.internal.util.collections.Sets; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; 
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.QueryResult; @@ -45,9 +48,6 @@ import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java index 14a1d9fea..25268d737 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java @@ -28,14 +28,14 @@ import java.util.List; import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; public class ExportQueryAnswersToCsvFileTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java index 64cbfa048..1c273b89f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -33,13 +33,13 @@ import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVParser; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; /** * Utility class for reading from and writing to data source files. 
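[Editorial note, not part of the patch series: PATCH 0095 moves the parser to org.semanticweb.vlog4j.parser and PATCH 0097 moves the checked exceptions to org.semanticweb.vlog4j.core.exceptions, so only import locations change for calling code. The sketch below shows how typical client code looks once both relocations are applied, combined with the string-based query parsing added earlier in this series. It is a hedged sketch under stated assumptions, not code from the repository: the class name RelocatedImportsSketch, the method name materialiseAndQuery, and the queryString parameter are illustrative placeholders, and the Reasoner passed in is assumed to have been created and filled with rules and facts as in the bundled examples.]

import java.io.IOException;

import org.semanticweb.vlog4j.core.exceptions.VLog4jException;
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;
import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator;
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.RuleParser;

public class RelocatedImportsSketch {

    // Illustrative placeholder method: materialise the knowledge base and print
    // the answers to one query atom given as a string.
    static void materialiseAndQuery(Reasoner reasoner, String queryString) throws IOException {
        RuleParser ruleParser = new RuleParser();
        try {
            // Load and materialise; the specific exceptions relocated in PATCH 0097
            // (EdbIdbSeparationException, IncompatiblePredicateArityException,
            // ReasonerStateException) are handled below through their common base
            // class VLog4jException, following the pattern used in the examples.
            reasoner.load();
            reasoner.reason();

            // String-based query parsing from the parser module (see the
            // parseLiteral/parsePositiveLiteral additions earlier in this series).
            PositiveLiteral query = ruleParser.parsePositiveLiteral(queryString);
            QueryResultIterator answers = reasoner.answerQuery(query, true);
            while (answers.hasNext()) {
                System.out.println(answers.next());
            }
        } catch (ParsingException e) {
            System.out.println("Failed to parse query: " + e.getMessage());
        } catch (VLog4jException e) {
            System.out.println("The reasoner encountered a problem: " + e.getMessage());
        }
    }
}

[End of editorial note; the patch series continues below.]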
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 0a02854da..164002092 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -29,6 +29,9 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -37,9 +40,6 @@ import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; public class GeneratedAnonymousIndividualsTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java index a69e8cb57..4c266757e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java @@ -33,14 +33,14 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java index b37e6a9e8..bb2163210 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java @@ -23,14 +23,14 @@ import java.io.IOException; import org.junit.Test; +import 
org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java index 3dfd0c47d..f249a4ac6 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java @@ -34,14 +34,14 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java index 77acf6410..7aa0a6a13 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java @@ -30,14 +30,14 @@ import org.junit.Ignore; import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import 
org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; public class LoadDataFromSparqlQueryTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 8b7cd4d2e..be1d8fb1f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -33,6 +33,9 @@ import java.util.Set; import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -43,9 +46,6 @@ import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; public class ReasonerStateTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java index 993bf8619..ed926355b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java @@ -31,15 +31,15 @@ import java.util.Set; import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java index f1d49afbb..a0f57bef4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -30,11 +30,11 @@ import org.apache.commons.lang3.StringUtils; import org.junit.Test; +import 
org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; public class SparqlQueryResultDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java index d4cb1da1c..9136064ea 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java @@ -33,6 +33,9 @@ import java.util.Arrays; import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -40,9 +43,6 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; public class StratifiedNegationTest { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 412d32906..0be04b6d0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -27,13 +27,13 @@ import java.util.Arrays; import java.util.List; +import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index d1a07b4fe..50d03f437 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -31,11 +31,11 @@ import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 0fe631915..e0d731f9e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -22,8 +22,8 @@ import java.io.IOException; +import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.VLog4jException; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index c54d44e24..01b837497 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -29,15 +29,15 @@ import java.io.File; import java.io.IOException; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 0c15fbc40..a7903ba54 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -29,15 +29,15 @@ import java.io.File; import java.io.IOException; +import 
org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index ca3beaff1..07f7b2c15 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -26,6 +26,9 @@ import java.util.LinkedHashSet; import java.util.List; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -35,9 +38,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java index 3abf42d8a..f340a3978 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java @@ -34,13 +34,13 @@ import org.eclipse.jdt.annotation.NonNull; import org.eclipse.jdt.annotation.Nullable; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import 
org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; /** * This class exemplifies setting a log file and log level for VLog reasoner diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 642c1c1a0..6377ac2c8 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -22,6 +22,9 @@ import java.io.IOException; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -30,9 +33,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.examples.ExamplesUtils; /** diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java index 44d599d04..910a02709 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java @@ -27,10 +27,10 @@ import java.util.ArrayList; import java.util.List; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index 90abb2b3b..2891ba93b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -24,11 +24,11 @@ 
import java.util.ArrayList; import java.util.List; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 30d13cc47..da23067c9 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -27,6 +27,9 @@ import java.io.IOException; import java.net.URL; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -34,9 +37,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 526ea2578..e829ba385 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -29,6 +29,9 @@ import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -36,9 +39,6 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import 
org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index 88ed798fb..5998a3fbb 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -37,15 +37,15 @@ import org.openrdf.rio.RDFParser; import org.openrdf.rio.Rio; import org.openrdf.rio.helpers.StatementCollector; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.rdf.RdfModelConverter; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java index fdb27f093..bf6c10f36 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java @@ -24,7 +24,7 @@ import java.util.HashMap; import java.util.Map; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; /** diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 32d2c8f40..0c1aa9802 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -24,9 +24,9 @@ import java.io.InputStream; import java.util.List; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationException; import 
org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.parser.RuleParserBase.FormulaContext; import org.semanticweb.vlog4j.parser.javacc.JavaCCRuleParser; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj index 55463146e..203a589db 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj @@ -17,7 +17,7 @@ import java.util.List; import java.util.ArrayList; import org.semanticweb.vlog4j.parser.RuleParserBase; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Literal; diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index 9ec1b5f70..5e114bafe 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -40,14 +40,14 @@ import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; public class TestReasonOverRdfFacts { From 54e2977846000e81842c71e9099b7d25a45c4c41 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 14:11:49 +0200 Subject: [PATCH 0098/1003] Make PrefixDeclarationException a VLog4jException --- .../exceptions/PrefixDeclarationException.java | 16 ++++++++-------- .../semanticweb/vlog4j/parser/RuleParser.java | 1 + 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java index 96e05979c..f3806c21d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java @@ -20,13 +20,13 @@ * #L% */ -public class PrefixDeclarationException extends Exception { - /** - * - */ - private static final long serialVersionUID = 1L; +public class PrefixDeclarationException extends VLog4jException { + /** + * + */ + private static final long serialVersionUID = 1L; - public PrefixDeclarationException(String errorMessage) { - super(errorMessage); - } + 
public PrefixDeclarationException(String errorMessage) { + super(errorMessage); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 0c1aa9802..e811811d3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -32,6 +32,7 @@ import org.semanticweb.vlog4j.parser.javacc.JavaCCRuleParser; import org.semanticweb.vlog4j.parser.javacc.ParseException; import org.semanticweb.vlog4j.parser.javacc.TokenMgrError; +import org.semanticweb.vlog4j.parser.ParsingException; /** * Class to access VLog parsing functionality. From 49791ff12deda52d61c4307f0be906e102468d4d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 14:28:46 +0200 Subject: [PATCH 0099/1003] Renamed JavaCC classes --- .../{RuleParserBase.java => JavaCCParserBase.java} | 2 +- .../org/semanticweb/vlog4j/parser/RuleParser.java | 14 +++++++------- .../{JavaCCRuleParser.jj => JavaCCParser.jj} | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) rename vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/{RuleParserBase.java => JavaCCParserBase.java} (99%) rename vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/{JavaCCRuleParser.jj => JavaCCParser.jj} (94%) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/JavaCCParserBase.java similarity index 99% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParserBase.java rename to vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/JavaCCParserBase.java index 5e60357b9..4fbb12c0f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/JavaCCParserBase.java @@ -45,7 +45,7 @@ * @author Jena developers, Apache Software Foundation (ASF) * */ -public class RuleParserBase { +public class JavaCCParserBase { final protected PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); final protected List rules = new ArrayList<>(); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index e811811d3..e9e4f6dce 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -28,8 +28,8 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.parser.RuleParserBase.FormulaContext; -import org.semanticweb.vlog4j.parser.javacc.JavaCCRuleParser; +import org.semanticweb.vlog4j.parser.JavaCCParserBase.FormulaContext; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.ParseException; import org.semanticweb.vlog4j.parser.javacc.TokenMgrError; import org.semanticweb.vlog4j.parser.ParsingException; @@ -44,10 +44,10 @@ */ public class RuleParser { - JavaCCRuleParser parser; + JavaCCParser parser; public void parse(InputStream stream, String encoding) throws ParsingException { - parser = new JavaCCRuleParser(stream, encoding); + parser = new JavaCCParser(stream, encoding); doParse(); } @@ -62,17 +62,17 @@ 
public void parse(String input) throws ParsingException { public Literal parseLiteral(String input) throws ParsingException { InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - JavaCCRuleParser localParser = new JavaCCRuleParser(inputStream, "UTF-8"); + JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); try { return localParser.literal(FormulaContext.HEAD); } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { throw new ParsingException(e.getMessage(), e); } } - + public PositiveLiteral parsePositiveLiteral(String input) throws ParsingException { InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - JavaCCRuleParser localParser = new JavaCCRuleParser(inputStream, "UTF-8"); + JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); try { return localParser.positiveLiteral(FormulaContext.HEAD); } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj similarity index 94% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj rename to vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 203a589db..f3dba3917 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCRuleParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -10,13 +10,13 @@ options // DEBUG_TOKEN_MANAGER = true ; } -PARSER_BEGIN(JavaCCRuleParser) +PARSER_BEGIN(JavaCCParser) package org.semanticweb.vlog4j.parser.javacc; import java.util.List; import java.util.ArrayList; -import org.semanticweb.vlog4j.parser.RuleParserBase; +import org.semanticweb.vlog4j.parser.JavaCCParserBase; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -29,11 +29,11 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -public class JavaCCRuleParser extends RuleParserBase +public class JavaCCParser extends JavaCCParserBase { } -PARSER_END(JavaCCRuleParser) +PARSER_END(JavaCCParser) void parse() throws PrefixDeclarationException: From 82c8b7241ebc76ba0976bf78454f03e48e3c2422 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 14:32:32 +0200 Subject: [PATCH 0100/1003] Ignore renamed files --- .../java/org/semanticweb/vlog4j/parser/javacc/.gitignore | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/.gitignore b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/.gitignore index 1b4476464..887cfb74c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/.gitignore +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/.gitignore @@ -1,8 +1,8 @@ -/JavaCCRuleParserConstants.java /ParseException.java /SimpleCharStream.java /Token.java /TokenMgrError.java -/JavaCCRuleParser.java -/JavaCCRuleParserTokenManager.java /JavaCharStream.java +/JavaCCParser.java +/JavaCCParserConstants.java +/JavaCCParserTokenManager.java From ca5e721a485c16518fe11ccee563f1ae0212dd6d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 14:35:39 +0200 Subject: [PATCH 0101/1003] Move JavaCC classes to common package --- 
.../main/java/org/semanticweb/vlog4j/parser/RuleParser.java | 2 +- .../vlog4j/parser/{ => javacc}/JavaCCParserBase.java | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) rename vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/{ => javacc}/JavaCCParserBase.java (97%) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index e9e4f6dce..27efc7638 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -28,10 +28,10 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.parser.JavaCCParserBase.FormulaContext; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.ParseException; import org.semanticweb.vlog4j.parser.javacc.TokenMgrError; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.FormulaContext; import org.semanticweb.vlog4j.parser.ParsingException; /** diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java similarity index 97% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/JavaCCParserBase.java rename to vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 4fbb12c0f..6902426fa 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.vlog4j.parser.javacc; /*- * #%L @@ -28,8 +28,8 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.parser.javacc.ParseException; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; /** * Basic methods used in the JavaCC-generated parser. @@ -71,7 +71,7 @@ public class JavaCCParserBase { * @author Markus Kroetzsch * */ - protected enum FormulaContext { + public enum FormulaContext { /** * Formula is to be interpreted in the context of a rule head (positive * occurrence). 
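The renames and moves above stay inside the generated parser package; client code continues to go through RuleParser, whose public methods are unchanged by these patches. A minimal sketch of that entry point, using only the methods visible in the surrounding diffs (parse, getFacts, getRules, parsePositiveLiteral); the tiny rule set and class name are invented for illustration:

    import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
    import org.semanticweb.vlog4j.parser.ParsingException;
    import org.semanticweb.vlog4j.parser.RuleParser;

    public class RuleParserSketch {
        public static void main(String[] args) throws ParsingException {
            RuleParser ruleParser = new RuleParser();
            // parse a small knowledge base given as a string
            ruleParser.parse("p(a) . q(?X) :- p(?X) .");
            ruleParser.getFacts().forEach(System.out::println);
            ruleParser.getRules().forEach(System.out::println);
            // parse a single query atom
            PositiveLiteral query = ruleParser.parsePositiveLiteral("q(?X)");
            System.out.println(query);
        }
    }
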
From 248a53b4419f20f0c52b847e716096a875d7f546 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 14:36:23 +0200 Subject: [PATCH 0102/1003] Remove unused member --- .../org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java | 1 - 1 file changed, 1 deletion(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 6902426fa..313f9f108 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -50,7 +50,6 @@ public class JavaCCParserBase { final protected List rules = new ArrayList<>(); final protected List facts = new ArrayList<>(); - final protected List queries = new ArrayList<>(); /** * "Local" variable to remember (universal) body variables during parsing. From 472470e121a01ff42e39e501e974ea6e773a11dd Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 19 Aug 2019 14:58:00 +0200 Subject: [PATCH 0103/1003] remove static import --- .../java/org/semanticweb/vlog4j/examples/ExamplesUtils.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 50d03f437..0ff1564ca 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -20,8 +20,6 @@ * #L% */ -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; - import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; @@ -140,7 +138,7 @@ public static int iteratorSize(Iterator iterator) { private static PositiveLiteral makeQueryAtom(String predicateName, int arity) { final List vars = new ArrayList<>(); for (int i = 0; i < arity; i++) - vars.add(makeVariable("x" + i)); + vars.add(Expressions.makeVariable("x" + i)); return Expressions.makePositiveLiteral(predicateName, vars); } From 27020618e31861cfc236e05b0480c5069fe7386b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 14:59:00 +0200 Subject: [PATCH 0104/1003] Remove boolean shortcuts --- .../vlog4j/parser/javacc/JavaCCParser.jj | 12 +-------- .../parser/javacc/JavaCCParserBase.java | 11 +++----- .../vlog4j/syntax/parser/RuleParserTest.java | 26 +++---------------- 3 files changed, 8 insertions(+), 41 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index f3dba3917..cac2b4bcc 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -16,7 +16,7 @@ package org.semanticweb.vlog4j.parser.javacc; import java.util.List; import java.util.ArrayList; -import org.semanticweb.vlog4j.parser.JavaCCParserBase; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -201,7 +201,6 @@ Term term(FormulaContext context) throws PrefixDeclarationException: } { s = IRI(false) { return Expressions.makeConstant(s); } -| LOOKAHEAD(booleanLiteral()) c = 
booleanLiteral() { return c; } | c = NumericLiteral() { return c; } | s = RDFLiteral() { return Expressions.makeConstant(s); } | t = < UNIVAR > @@ -263,15 +262,6 @@ String Langtag() : } } -Constant booleanLiteral() : -{ - Token t; -} -{ - t = < TRUE > { return createBooleanLiteral(t.image); } -| t = < FALSE > { return createBooleanLiteral(t.image); } -} - String String(): { Token t; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 313f9f108..b325ad9bd 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -24,12 +24,15 @@ import java.util.ArrayList; import java.util.HashSet; +import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Predicate; /** * Basic methods used in the JavaCC-generated parser. @@ -50,6 +53,7 @@ public class JavaCCParserBase { final protected List rules = new ArrayList<>(); final protected List facts = new ArrayList<>(); +// final protected List> dataSources = new ArrayList<>(); /** * "Local" variable to remember (universal) body variables during parsing. @@ -83,13 +87,6 @@ public enum FormulaContext { BODY } - protected Constant createBooleanLiteral(String lexicalForm) { - // lexicalForm is one of ['true' or 'false'] - // we remove the quotes and add data type - lexicalForm = lexicalForm.substring(1, lexicalForm.length() - 1); - return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_BOOLEAN + ">"); - } - protected Constant createIntegerLiteral(String lexicalForm) { return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_INTEGER + ">"); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 5b15a3eb4..92ed730e5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -193,26 +193,6 @@ public void testNoDollarVariables() throws ParsingException { ruleParser.parse(input); } - @Test - public void testTrueBooleanLiteral() throws ParsingException { - String input = "p('true') ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - PositiveLiteral trueLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeConstant("true^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); - assertEquals(Arrays.asList(trueLiteral), ruleParser.getFacts()); - } - - @Test - public void testFalseBooleanLiteral() throws ParsingException { - String input = "p('false') ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - PositiveLiteral falseLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeConstant("false^^<" + PrefixDeclarations.XSD_BOOLEAN + ">")); - assertEquals(Arrays.asList(falseLiteral), 
ruleParser.getFacts()); - } - @Test public void testIntegerLiteral() throws ParsingException { String input = "p(42) ."; @@ -373,14 +353,14 @@ public void testPositiveLiteral() throws ParsingException { Literal literal = ruleParser.parsePositiveLiteral(input); assertEquals(atom1, literal); } - + @Test(expected = ParsingException.class) public void testPositiveLiteralError() throws ParsingException { String input = "~ (?X,)"; RuleParser ruleParser = new RuleParser(); ruleParser.parsePositiveLiteral(input); } - + @Test public void testLiteral() throws ParsingException { String input = "~ (?X,)"; @@ -388,7 +368,7 @@ public void testLiteral() throws ParsingException { Literal literal = ruleParser.parseLiteral(input); assertEquals(negAtom1, literal); } - + @Test(expected = ParsingException.class) public void tesLiteralError() throws ParsingException { String input = "(?X, Date: Mon, 19 Aug 2019 15:00:55 +0200 Subject: [PATCH 0105/1003] update import path --- .../java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index f3dba3917..12917e2b0 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -16,7 +16,7 @@ package org.semanticweb.vlog4j.parser.javacc; import java.util.List; import java.util.ArrayList; -import org.semanticweb.vlog4j.parser.JavaCCParserBase; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Rule; From f6db44436b27e8b635da5b610118bee81d9c1063 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 19 Aug 2019 15:10:22 +0200 Subject: [PATCH 0106/1003] remove boolean tokens --- .../org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index cac2b4bcc..3f5de2b7f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -365,10 +365,7 @@ TOKEN : TOKEN [ IGNORE_CASE ] : { - < TRUE : "'true'" > -| < FALSE : "'false'" > - // ------------------------------------------------- -| < INTEGER : ([ "-", "+" ])? < DIGITS > > + < INTEGER : ([ "-", "+" ])? < DIGITS > > | < DECIMAL : ([ "-", "+" ])? 
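With the 'true'/'false' shortcuts removed from the grammar, boolean values are written like any other datatyped constant. A minimal sketch, modelled on the deleted test code; the full xsd:boolean IRI is spelled out here in place of the PrefixDeclarations.XSD_BOOLEAN constant (same string), and the predicate name p is only an example:

    import org.semanticweb.vlog4j.core.model.api.Constant;
    import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
    import org.semanticweb.vlog4j.core.model.implementation.Expressions;

    public class BooleanConstantSketch {
        public static void main(String[] args) {
            // a boolean value as an explicitly typed constant
            Constant trueConstant = Expressions
                    .makeConstant("true^^<http://www.w3.org/2001/XMLSchema#boolean>");
            PositiveLiteral fact = Expressions.makePositiveLiteral("p", trueConstant);
            System.out.println(fact);
        }
    }
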
From 8dfcbd185741fb791815be0aba1495c717a6dc31 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 19 Aug 2019 17:10:41 +0200 Subject: [PATCH 0107/1003] add new example: counting triangles --- .../vlog4j/examples/CountingTriangles.java | 112 ++++++++++++++++++ 1 file changed, 112 insertions(+) create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java new file mode 100644 index 000000000..700d6e2a1 --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -0,0 +1,112 @@ +package org.semanticweb.vlog4j.examples; + +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.net.URL; + +import org.semanticweb.vlog4j.core.exceptions.VLog4jException; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; + +/** + * In this example we count the number of triangles in the reflexive + * sharingBorderWith relation from Wikidata. + * + * @author Markus Kroetzsch + * @author Larry Gonzalez + * + */ +public class CountingTriangles { + + public static void main(final String[] args) throws IOException { + + final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); + + ExamplesUtils.configureLogging(); // use simple logger for the example + + try (final Reasoner reasoner = Reasoner.getInstance()) { + reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); + reasoner.setLogLevel(LogLevel.DEBUG); + + // (wdt:P47 = "Sharing border with") + // list of sharing border countries + final String sparqlCountriesSharingBorders = "?country1 wdt:P31 wd:Q6256 . ?country2 wdt:P31 wd:Q6256 . 
?country1 wdt:P47 ?country2 ."; + + final DataSource sharingBordersDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, + "country1,country2", sparqlCountriesSharingBorders); + final Predicate sharingBordersPredicate = Expressions.makePredicate("sharingBorders", 2); + reasoner.addFactsFromDataSource(sharingBordersPredicate, sharingBordersDataSource); + + // We compute the reflexive relation from "sharingBorders", and then we count + // the number of triangles + String rules = "" // + + "reflexiveBorder(?X,?Y) :- sharingBorders(?X,?Y) .\n" + + "reflexiveBorder(?Y,?X) :- reflexiveBorder(?X,?Y) .\n" + + "triangle(?X,?Y,?Z) :- reflexiveBorder(?X,?Y), reflexiveBorder(?Y,?Z), reflexiveBorder(?Z,?X) . \n"; + + RuleParser ruleParser = new RuleParser(); + try { + ruleParser.parse(rules); + } catch (ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } + + reasoner.addRules(ruleParser.getRules()); + + System.out.println("Rules configured:\n--"); + reasoner.getRules().forEach(System.out::println); + System.out.println("--"); + + reasoner.load(); + + System.out.println("Loading completed."); + System.out.println("Starting reasoning ..."); + reasoner.reason(); + System.out.println("... reasoning completed.\n--"); + + /* Execute a query */ + try { + PositiveLiteral query = ruleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)"); + QueryResultIterator answers = reasoner.answerQuery(query, true); + // Note that we divide it by 6 + System.out.println("The number of triangles in the sharesBorderWith relation (from Wikidata) is: " + + ": " + ExamplesUtils.iteratorSize(answers) / 6); + } catch (ParsingException e) { + System.out.println("Failed to parse query: " + e.getMessage()); + } + + System.out.println("Done."); + } catch (VLog4jException e) { + System.out.println("The reasoner encountered a problem:" + e.getMessage()); + } + + } +} From 5573ecffced184663d657c532346632e6ef1d6ad Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 19 Aug 2019 17:20:26 +0200 Subject: [PATCH 0108/1003] cleanup for triangle counting example --- .../org/semanticweb/vlog4j/examples/CountingTriangles.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 700d6e2a1..2799f6b0b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -64,10 +64,9 @@ public static void main(final String[] args) throws IOException { final Predicate sharingBordersPredicate = Expressions.makePredicate("sharingBorders", 2); reasoner.addFactsFromDataSource(sharingBordersPredicate, sharingBordersDataSource); - // We compute the reflexive relation from "sharingBorders", and then we count - // the number of triangles - String rules = "" // - + "reflexiveBorder(?X,?Y) :- sharingBorders(?X,?Y) .\n" + // We compute the reflexive relation from "sharingBorders", and then the + // triangle relation + String rules = "reflexiveBorder(?X,?Y) :- sharingBorders(?X,?Y) .\n" + "reflexiveBorder(?Y,?X) :- reflexiveBorder(?X,?Y) .\n" + "triangle(?X,?Y,?Z) :- reflexiveBorder(?X,?Y), reflexiveBorder(?Y,?Z), reflexiveBorder(?Z,?X) . 
\n"; From a44b3a9ec116222ae8f5e33ef3b8a45e1764b473 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 17:47:24 +0200 Subject: [PATCH 0109/1003] Support first version of @source declarations --- vlog4j-examples/src/main/data/input/doid.rls | 19 ++-- .../vlog4j/examples/DoidExample.java | 39 +------- .../examples/core/AddDataFromCsvFile.java | 67 +++++--------- .../examples/core/AddDataFromRdfFile.java | 57 +++++------- .../semanticweb/vlog4j/parser/RuleParser.java | 8 ++ .../vlog4j/parser/javacc/JavaCCParser.jj | 91 ++++++++++++++++--- .../parser/javacc/JavaCCParserBase.java | 11 ++- .../vlog4j/syntax/parser/RuleParserTest.java | 52 +++++++++++ 8 files changed, 208 insertions(+), 136 deletions(-) diff --git a/vlog4j-examples/src/main/data/input/doid.rls b/vlog4j-examples/src/main/data/input/doid.rls index 8a178a9f8..545febc5e 100644 --- a/vlog4j-examples/src/main/data/input/doid.rls +++ b/vlog4j-examples/src/main/data/input/doid.rls @@ -1,10 +1,17 @@ @prefix rdfs: . - -%%%%% Data sources used in this example (with input predicate names): -% doidTriple(S,P,O) -- triples from DOID ontology (loaded from RDF) -% recentDeaths(human) -- human who died recently (Wikidata IRI) -% recentDeathsCause(human, deathCause) -- cause of death of a human (both Wikidata IRIs) -% diseaseId(diseaseIri, doid) -- disease from Wikidata (IRI) with DOID (string identifier) +@prefix wdqs: . + +@source doidTriple(3): load-rdf("src/main/data/input/doid.nt.gz") . +@source diseaseId(2): sparql(wdqs:sparql, "disease,doid", "?disease wdt:P699 ?doid .") . +@source recentDeaths(1): sparql(wdqs:sparql, "human", + '''?human wdt:P31 wd:Q5; + wdt:P570 ?deathDate . + FILTER (YEAR(?deathDate) = 2018)''') . +@source recentDeathsCause(2): sparql(wdqs:sparql, "human,causeOfDeath", + '''?human wdt:P31 wd:Q5; + wdt:P570 ?deathDate ; + wdt:P509 ?causeOfDeath . + FILTER (YEAR(?deathDate) = 2018)''') . % Combine recent death data (infer "unknown" cause if no cause given): deathCause(?X, ?Z) :- recentDeathsCause(?X, ?Z) . 
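The @source declarations above make the rules file self-contained: each one names a predicate with its arity and binds it to a data source (an RDF file or a SPARQL query against Wikidata). The parser exposes the declared sources as predicate–source pairs, which are registered with the reasoner before loading, as the updated example classes below do. A rough sketch of that wiring; the relative file path assumes the working directory of the examples module, and exception handling is collapsed into a single throws clause:

    import java.io.FileInputStream;

    import org.apache.commons.lang3.tuple.Pair;
    import org.semanticweb.vlog4j.core.model.api.Predicate;
    import org.semanticweb.vlog4j.core.reasoner.DataSource;
    import org.semanticweb.vlog4j.core.reasoner.Reasoner;
    import org.semanticweb.vlog4j.parser.RuleParser;

    public class SourceDeclarationSketch {
        public static void main(String[] args) throws Exception {
            try (FileInputStream rulesFile = new FileInputStream("src/main/data/input/doid.rls");
                    Reasoner reasoner = Reasoner.getInstance()) {
                RuleParser ruleParser = new RuleParser();
                ruleParser.parse(rulesFile, "UTF-8");
                // register every declared data source for its predicate
                for (Pair<Predicate, DataSource> pair : ruleParser.getDataSources()) {
                    reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight());
                }
                reasoner.addRules(ruleParser.getRules());
                reasoner.load();
                reasoner.reason();
            }
        }
    }
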
diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 0be04b6d0..8fbc5f191 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -20,23 +20,19 @@ * #L% */ -import java.io.File; import java.io.FileInputStream; import java.io.IOException; -import java.net.URL; import java.util.Arrays; import java.util.List; +import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -55,40 +51,10 @@ public class DoidExample { public static void main(final String[] args) throws IOException { ExamplesUtils.configureLogging(); - final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); - try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); reasoner.setLogLevel(LogLevel.DEBUG); - /* Configure RDF data source */ - final Predicate doidTriplePredicate = Expressions.makePredicate("doidTriple", 3); - final DataSource doidDataSource = new RdfFileDataSource( - new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); - reasoner.addFactsFromDataSource(doidTriplePredicate, doidDataSource); - - /* Configure SPARQL data sources */ - final String sparqlHumansWithDisease = "?disease wdt:P699 ?doid ."; - // (wdt:P669 = "Disease Ontology ID") - final DataSource diseasesDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, - "disease,doid", sparqlHumansWithDisease); - final Predicate diseaseIdPredicate = Expressions.makePredicate("diseaseId", 2); - reasoner.addFactsFromDataSource(diseaseIdPredicate, diseasesDataSource); - - final String sparqlRecentDeaths = "?human wdt:P31 wd:Q5; wdt:P570 ?deathDate . FILTER (YEAR(?deathDate) = 2018)"; - // (wdt:P31 = "instance of"; wd:Q5 = "human", wdt:570 = "date of death") - final DataSource recentDeathsDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "human", - sparqlRecentDeaths); - final Predicate recentDeathsPredicate = Expressions.makePredicate("recentDeaths", 1); - reasoner.addFactsFromDataSource(recentDeathsPredicate, recentDeathsDataSource); - - final String sparqlRecentDeathsCause = sparqlRecentDeaths + "?human wdt:P509 ?causeOfDeath . 
"; - // (wdt:P509 = "cause of death") - final DataSource recentDeathsCauseDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, - "human,causeOfDeath", sparqlRecentDeathsCause); - final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); - reasoner.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); - /* Configure rules */ RuleParser ruleParser = new RuleParser(); try { @@ -97,6 +63,9 @@ public static void main(final String[] args) throws IOException { System.out.println("Failed to parse rules: " + e.getMessage()); return; } + for (Pair pair : ruleParser.getDataSources()) { + reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight()); + } reasoner.addRules(ruleParser.getRules()); System.out.println("Rules used in this example:"); reasoner.getRules().forEach(System.out::println); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 01b837497..bee2c4ef3 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.examples.core; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; - /*- * #%L * VLog4j Examples @@ -22,20 +20,14 @@ * #L% */ -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; - -import java.io.File; import java.io.IOException; +import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; @@ -58,23 +50,23 @@ * * @author Christian Lewe * @author Irina Dragoste + * @author Markus Kroetzsch * */ public class AddDataFromCsvFile { - public static void main(final String[] args) - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public static void main(final String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, + IncompatiblePredicateArityException, ParsingException { ExamplesUtils.configureLogging(); - /* 1. Prepare rules and create some related vocabulary objects used later. */ - final Predicate bicycleEDB = makePredicate("bicycleEDB", 1); - final Predicate wheelEDB = makePredicate("wheelEDB", 1); - final Predicate hasPartIDB = makePredicate("hasPartIDB", 2); - final Predicate hasPartEDB = makePredicate("hasPartEDB", 2); + /* 1. Load data and prepare rules. 
*/ - final String rules = "%%%% We specify the rules syntactically for convenience %%%\n" - // load all data from the file-based ("EDB") predicates: + final String rules = "" // first declare file inputs: + + "@source bicycleEDB(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz\") ." + + "@source hasPartEDB(2) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ." + + "@source wheelEDB(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz\") ." + // rules to load all data from the file-based ("EDB") predicates: + "bicycleIDB(?X) :- bicycleEDB(?X) ." // + "wheelIDB(?X) :- wheelEDB(?X) ." // + "hasPartIDB(?X, ?Y) :- hasPartEDB(?X, ?Y) ." // @@ -88,12 +80,7 @@ public static void main(final String[] args) + "isPartOfIDB(?X, ?Y) :- hasPartIDB(?Y, ?X) ."; RuleParser ruleParser = new RuleParser(); - try { - ruleParser.parse(rules); - } catch (ParsingException e) { - System.out.println("Failed to parse rules: " + e.getMessage()); - return; - } + ruleParser.parse(rules); /* * 2. Loading, reasoning, and querying while using try-with-resources to close @@ -101,39 +88,27 @@ public static void main(final String[] args) */ try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.addRules(ruleParser.getRules()); - - /* Importing {@code .csv} files as data sources. */ - final DataSource bicycleEDBDataSource = new CsvFileDataSource( - new File(ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz")); - final DataSource hasPartDataSource = new CsvFileDataSource( - new File(ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz")); - final DataSource wheelDataSource = new CsvFileDataSource( - new File(ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz")); - reasoner.addFactsFromDataSource(bicycleEDB, bicycleEDBDataSource); - reasoner.addFactsFromDataSource(hasPartEDB, hasPartDataSource); - reasoner.addFactsFromDataSource(wheelEDB, wheelDataSource); - + for (Pair pair : ruleParser.getDataSources()) { + reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight()); + } reasoner.load(); + System.out.println("Before materialisation:"); - final Variable x = makeVariable("X"); - final Variable y = makeVariable("Y"); - final PositiveLiteral hasPartEDBXY = makePositiveLiteral(hasPartEDB, x, y); - ExamplesUtils.printOutQueryAnswers(hasPartEDBXY, reasoner); + ExamplesUtils.printOutQueryAnswers("hasPartEDB(?X, ?Y)", reasoner); /* The reasoner will use the Restricted Chase by default. */ reasoner.reason(); System.out.println("After materialisation:"); - final PositiveLiteral hasPartIDBXY = makePositiveLiteral(hasPartIDB, x, y); - ExamplesUtils.printOutQueryAnswers(hasPartIDBXY, reasoner); + final PositiveLiteral hasPartIdbXY = ruleParser.parsePositiveLiteral("hasPartIDB(?X, ?Y)"); + ExamplesUtils.printOutQueryAnswers(hasPartIdbXY, reasoner); /* 3. Exporting query answers to {@code .csv} files. 
*/ - reasoner.exportQueryAnswersToCsv(hasPartIDBXY, ExamplesUtils.OUTPUT_FOLDER + "hasPartIDBXYWithBlanks.csv", + reasoner.exportQueryAnswersToCsv(hasPartIdbXY, ExamplesUtils.OUTPUT_FOLDER + "hasPartIDBXYWithBlanks.csv", true); - reasoner.exportQueryAnswersToCsv(hasPartIDBXY, + reasoner.exportQueryAnswersToCsv(hasPartIdbXY, ExamplesUtils.OUTPUT_FOLDER + "hasPartIDBXYWithoutBlanks.csv", false); - final Constant redBike = makeConstant("redBike"); - final PositiveLiteral hasPartIDBRedBikeY = makePositiveLiteral(hasPartIDB, redBike, y); + final PositiveLiteral hasPartIDBRedBikeY = ruleParser.parsePositiveLiteral("hasPartIDB(redBike, ?Y)"); reasoner.exportQueryAnswersToCsv(hasPartIDBRedBikeY, ExamplesUtils.OUTPUT_FOLDER + "hasPartIDBRedBikeYWithBlanks.csv", true); } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index a7903ba54..9d8d690d5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.examples.core; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; - /*- * #%L * VLog4j Examples @@ -22,20 +20,14 @@ * #L% */ -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; -import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; - -import java.io.File; import java.io.IOException; +import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; @@ -60,25 +52,27 @@ * such an {@code .nt} file. *

    * For exporting, a path to the output {@code .csv} file must be specified. + *

    + * Exception handling is omitted for simplicity. * * @author Christian Lewe + * @author Markus Kroetzsch * */ public class AddDataFromRdfFile { - public static void main(final String[] args) - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public static void main(final String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, + IncompatiblePredicateArityException, ParsingException { ExamplesUtils.configureLogging(); /* 1. Prepare rules and create some related vocabulary objects used later. */ - final Predicate triplesEDB = makePredicate("triplesEDB", 3); // predicate to load RDF - final Predicate triplesIDB = makePredicate("triplesIDB", 3); // predicate for inferred triples - final Constant hasPartPredicate = makeConstant("https://example.org/hasPart"); // RDF property used in query - final String rules = "%%%% We specify the rules syntactically for convenience %%%\n" + final String rules = "" // first define some namespaces and abbreviations: + "@prefix ex: ." + "@prefix rdf: ." - // load all triples from file: + // specify data sources: + + "@source triplesEDB(3) : load-rdf(\"" + ExamplesUtils.INPUT_FOLDER + "ternaryBicycleEDB.nt.gz\") ." + // rule for loading all triples from file: + "triplesIDB(?S, ?P, ?O) :- triplesEDB(?S, ?P, ?O) ." // every bicycle has some part that is a wheel: + "triplesIDB(?S, ex:hasPart, !X), triplesIDB(!X, rdf:type, ex:wheel) :- triplesIDB(?S, rdf:type, ex:bicycle) ." @@ -89,12 +83,7 @@ public static void main(final String[] args) + "triplesIDB(?S, ex:hasPart, ?O) :- triplesIDB(?O, ex:isPartOf, ?S) ."; RuleParser ruleParser = new RuleParser(); - try { - ruleParser.parse(rules); - } catch (ParsingException e) { - System.out.println("Failed to parse rules: " + e.getMessage()); - return; - } + ruleParser.parse(rules); /* * 2. Loading, reasoning, querying and exporting, while using try-with-resources @@ -102,23 +91,20 @@ public static void main(final String[] args) */ try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.addRules(ruleParser.getRules()); - - /* Importing {@code .nt.gz} file as data source. */ - final DataSource triplesEDBDataSource = new RdfFileDataSource( - new File(ExamplesUtils.INPUT_FOLDER + "ternaryBicycleEDB.nt.gz")); - reasoner.addFactsFromDataSource(triplesEDB, triplesEDBDataSource); - + for (Pair pair : ruleParser.getDataSources()) { + reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight()); + } reasoner.load(); + System.out.println("Before materialisation:"); - final Variable x = makeVariable("X"); - final Variable y = makeVariable("Y"); - final PositiveLiteral hasPartEDB = makePositiveLiteral(triplesEDB, x, hasPartPredicate, y); - ExamplesUtils.printOutQueryAnswers(hasPartEDB, reasoner); + + ExamplesUtils.printOutQueryAnswers("triplesEDB(?X, , ?Y)", reasoner); /* The reasoner will use the Restricted Chase by default. */ reasoner.reason(); System.out.println("After materialisation:"); - final PositiveLiteral hasPartIDB = makePositiveLiteral(triplesIDB, x, hasPartPredicate, y); + final PositiveLiteral hasPartIDB = ruleParser + .parsePositiveLiteral("triplesIDB(?X, , ?Y)"); ExamplesUtils.printOutQueryAnswers(hasPartIDB, reasoner); /* Exporting query answers to {@code .csv} files. 
*/ @@ -127,9 +113,8 @@ public static void main(final String[] args) reasoner.exportQueryAnswersToCsv(hasPartIDB, ExamplesUtils.OUTPUT_FOLDER + "ternaryHasPartIDBWithoutBlanks.csv", false); - final Constant redBikeSubject = makeConstant("https://example.org/redBike"); - final PositiveLiteral existsHasPartRedBike = makePositiveLiteral(triplesIDB, redBikeSubject, - hasPartPredicate, x); + final PositiveLiteral existsHasPartRedBike = ruleParser.parsePositiveLiteral( + "triplesIDB(, , ?X)"); reasoner.exportQueryAnswersToCsv(existsHasPartRedBike, ExamplesUtils.OUTPUT_FOLDER + "existsHasPartIDBRedBikeWithBlanks.csv", true); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 27efc7638..58068d124 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -24,10 +24,13 @@ import java.io.InputStream; import java.util.List; +import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.ParseException; import org.semanticweb.vlog4j.parser.javacc.TokenMgrError; @@ -95,4 +98,9 @@ public List getRules() { public List getFacts() { return parser.getFacts(); } + + public List> getDataSources() { + return parser.getDataSources(); + } + } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index cac2b4bcc..6724e5aca 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -13,11 +13,17 @@ options PARSER_BEGIN(JavaCCParser) package org.semanticweb.vlog4j.parser.javacc; +import java.io.File; +import java.io.IOException; +import java.net.URL; +import java.net.MalformedURLException; + import java.util.List; import java.util.ArrayList; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Literal; @@ -25,6 +31,10 @@ import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.core.model.implementation.Expressions; @@ -42,6 +52,7 @@ void parse() throws PrefixDeclarationException: { ( base() )? 
( prefix() )* + ( source() )* ( statement() )* < EOF > } @@ -63,13 +74,69 @@ void prefix() throws PrefixDeclarationException: String iriString; } { - < PREFIX > t = < PNAME_NS > iriString = IRIREF() < DOT > + ( + LOOKAHEAD(< COLON >) < PREFIX > t = < COLON > iriString = IRIREF() < DOT > + | < PREFIX > t = < PNAME_NS > iriString = IRIREF() < DOT > + ) { //note that prefix includes the colon (:) prefixDeclarations.setPrefix(t.image, iriString); } } +void source() throws PrefixDeclarationException: +{ + String predicateName; + DataSource dataSource; + Token arity; +} +{ + < SOURCE > predicateName = predicateName() < LPAREN > arity = < INTEGER > < RPAREN > < COLON > dataSource = dataSource() < DOT > + { + int nArity; + nArity = Integer.parseInt(arity.image); + // Do not catch NumberFormatException: < INTEGER > matches must parse as int in Java! + if ( dataSource instanceof RdfFileDataSource && nArity != 3 ) + throw new ParseException("Cannot load RDF data into predicate of arity " + nArity +"."); + addDataSource(predicateName, nArity,dataSource); + } +} + +DataSource dataSource() throws PrefixDeclarationException: +{ + String fileName; + String endpoint; + String variables; + String query; +} +{ + < LOADCSV > < LPAREN > fileName = String() < RPAREN > + { + try { + return new CsvFileDataSource(new File(fileName)) ; + } catch (IOException e) { + throw new ParseException("Could not use source file \"" + fileName +"\": " + e.getMessage()); + } + } +| < LOADRDF > < LPAREN > fileName = String() < RPAREN > + { + try { + return new RdfFileDataSource(new File(fileName)) ; + } catch (IOException e) { + throw new ParseException("Could not use source file \"" + fileName +"\": " + e.getMessage()); + } + } +| < SPARQL > < LPAREN > endpoint = IRI(false) < COMMA > variables = String() < COMMA > query = String() < RPAREN > + { + try { + return new SparqlQueryResultDataSource(new URL(endpoint), variables, query); + } catch (MalformedURLException e) { + throw new ParseException("SPARQL endoint \"" + endpoint +"\" is not a valid URL: " + e.getMessage()); + } + } +} + + void statement() throws PrefixDeclarationException: { Rule r; @@ -302,10 +369,10 @@ String PrefixedName() throws PrefixDeclarationException: Token t; } { - ( + //( t = < PNAME_LN > - | t = < PNAME_NS > - ) + //| t = < PNAME_NS > + //) { return prefixDeclarations.resolvePrefixedName(t.image);} //{ return prefixDeclarations.resolvePrefixedName(t.image, t.beginLine, t.beginColumn);} } @@ -361,16 +428,16 @@ TOKEN : { < PREFIX : "@prefix" > | < BASE : "@base" > +| < SOURCE : "@source" > +| < LOADCSV : "load-csv"> +| < LOADRDF : "load-rdf"> +| < SPARQL : "sparql"> } TOKEN [ IGNORE_CASE ] : { - < TRUE : "'true'" > -| < FALSE : "'false'" > - // ------------------------------------------------- -| < INTEGER : ([ "-", "+" ])? < DIGITS > > -| - < DECIMAL : + < INTEGER : ([ "-", "+" ])? < DIGITS > > +| < DECIMAL : ([ "-", "+" ])? ( (< DIGITS >)+ "." (< DIGITS >)* @@ -445,8 +512,8 @@ TOKEN : { // Includes # for relative URIs < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > -| < PNAME_NS : (< PN_PREFIX >)? ":" > -| < PNAME_LN : < PNAME_NS > < PN_LOCAL > > +| < PNAME_LN : (< PN_PREFIX >)? ":" < PN_LOCAL > > +| < PNAME_NS : < PN_PREFIX > ":" > | < BLANK_NODE_LABEL : "_:" < PN_LOCAL > > | < UNIVAR : "?" < VARORPREDNAME > > | < EXIVAR : "!" 
< VARORPREDNAME > > diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index b325ad9bd..03000e018 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -53,7 +53,7 @@ public class JavaCCParserBase { final protected List rules = new ArrayList<>(); final protected List facts = new ArrayList<>(); -// final protected List> dataSources = new ArrayList<>(); + final protected List> dataSources = new ArrayList<>(); /** * "Local" variable to remember (universal) body variables during parsing. @@ -98,6 +98,11 @@ protected Constant createDecimalLiteral(String lexicalForm) { protected Constant createDoubleLiteral(String lexicalForm) { return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_DOUBLE + ">"); } + + void addDataSource(String predicateName, int arity, DataSource dataSource) { + Predicate predicate = Expressions.makePredicate(predicateName, arity); + dataSources.add(Pair.of(predicate,dataSource)); + } protected static String unescapeStr(String s, int line, int column) throws ParseException { return unescape(s, '\\', false, line, column); @@ -220,5 +225,9 @@ public List getRules() { public List getFacts() { return facts; } + + public List> getDataSources() { + return dataSources; + } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 92ed730e5..bc8c0b9db 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -22,6 +22,10 @@ import static org.junit.Assert.*; +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; import java.util.Arrays; import org.junit.Test; @@ -29,10 +33,14 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -376,4 +384,48 @@ public void tesLiteralError() throws ParsingException { ruleParser.parseLiteral(input); } + @Test + public void testCsvSource() throws ParsingException, IOException { + String input = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); + Predicate p = Expressions.makePredicate("p", 2); + assertEquals(1, ruleParser.getDataSources().size()); + assertEquals(p, 
ruleParser.getDataSources().get(0).getLeft()); + assertEquals(csvds, ruleParser.getDataSources().get(0).getRight()); + } + + @Test + public void testRdfSource() throws ParsingException, IOException { + String input = "@source p(3) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); + Predicate p = Expressions.makePredicate("p", 3); + assertEquals(1, ruleParser.getDataSources().size()); + assertEquals(p, ruleParser.getDataSources().get(0).getLeft()); + assertEquals(rdfds, ruleParser.getDataSources().get(0).getRight()); + } + + @Test(expected = ParsingException.class) + public void testRdfSourceInvalidArity() throws ParsingException, IOException { + String input = "@source p(2) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + + @Test + public void testSparqlSource() throws ParsingException, MalformedURLException { + String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( + new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); + Predicate p = Expressions.makePredicate("p", 2); + assertEquals(1, ruleParser.getDataSources().size()); + assertEquals(p, ruleParser.getDataSources().get(0).getLeft()); + assertEquals(sparqlds, ruleParser.getDataSources().get(0).getRight()); + } + } From c431c6be6293b736289c93dd931d59eb9bcab662 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 19 Aug 2019 19:11:07 +0200 Subject: [PATCH 0110/1003] transform counting triangles example into new syntax (using @source directives) --- .../main/data/input/counting-triangles.rls | 15 ++++++ .../vlog4j/examples/CountingTriangles.java | 49 +++++++------------ 2 files changed, 33 insertions(+), 31 deletions(-) create mode 100644 vlog4j-examples/src/main/data/input/counting-triangles.rls diff --git a/vlog4j-examples/src/main/data/input/counting-triangles.rls b/vlog4j-examples/src/main/data/input/counting-triangles.rls new file mode 100644 index 000000000..338255aa0 --- /dev/null +++ b/vlog4j-examples/src/main/data/input/counting-triangles.rls @@ -0,0 +1,15 @@ +@prefix wdqs: . + +@source shareBorderWith(2): sparql(wdqs:sparql, "country1,country2", + '''?country1 wdt:P31 wd:Q6256 . + ?country2 wdt:P31 wd:Q6256 . + ?country1 wdt:P47 ?country2 .''') . + + +% compute the reflexive relation of SharingBorders +reflexiveShareBorderWith(?X,?Y) :- shareBorderWith(?X,?Y) . +reflexiveShareBorderWith(?Y,?X) :- reflexiveShareBorderWith(?X,?Y) . + +% compute the triangles +triangle(?X,?Y,?Z) :- reflexiveShareBorderWith(?X,?Y), reflexiveShareBorderWith(?Y,?Z), reflexiveShareBorderWith(?Z,?X) . 
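The pieces introduced above — the `@source` grammar production, `RuleParser.getDataSources()`, and the data-source tests — are meant to be used together with a `Reasoner`. The following is only a minimal sketch of that wiring, assuming the API shown in this patch series: the knowledge-base string is illustrative (the CSV path is the one used in `testCsvSource` and should point to an existing `.csv` file when actually run), the predicate `q` and its rule are made up for the example, and checked exceptions are collapsed into a single `throws Exception`.

```
import org.apache.commons.lang3.tuple.Pair;
import org.semanticweb.vlog4j.core.model.api.Predicate;
import org.semanticweb.vlog4j.core.reasoner.DataSource;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;
import org.semanticweb.vlog4j.parser.RuleParser;

public class SourceDirectiveSketch {
	public static void main(final String[] args) throws Exception {
		// A knowledge base that declares its own file input and one rule over it:
		final String kb = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") . "
				+ "q(?X, ?Y) :- p(?X, ?Y) .";
		final RuleParser ruleParser = new RuleParser();
		ruleParser.parse(kb); // throws ParsingException on malformed input
		try (final Reasoner reasoner = Reasoner.getInstance()) {
			// Register every parsed @source declaration with the reasoner:
			for (final Pair<Predicate, DataSource> pair : ruleParser.getDataSources()) {
				reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight());
			}
			reasoner.addRules(ruleParser.getRules());
			reasoner.load();
			reasoner.reason();
		}
	}
}
```

The updated examples in this patch series (DoidExample, AddDataFromCsvFile, AddDataFromRdfFile, CountingTriangles) all follow this same parse-then-register pattern.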
+ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 2799f6b0b..662b33c17 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.examples; +import java.io.FileInputStream; + /*- * #%L * VLog4j Examples @@ -21,17 +23,15 @@ */ import java.io.IOException; -import java.net.URL; +import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -46,50 +46,37 @@ public class CountingTriangles { public static void main(final String[] args) throws IOException { - - final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); - - ExamplesUtils.configureLogging(); // use simple logger for the example + ExamplesUtils.configureLogging(); try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); reasoner.setLogLevel(LogLevel.DEBUG); - // (wdt:P47 = "Sharing border with") - // list of sharing border countries - final String sparqlCountriesSharingBorders = "?country1 wdt:P31 wd:Q6256 . ?country2 wdt:P31 wd:Q6256 . ?country1 wdt:P47 ?country2 ."; - - final DataSource sharingBordersDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, - "country1,country2", sparqlCountriesSharingBorders); - final Predicate sharingBordersPredicate = Expressions.makePredicate("sharingBorders", 2); - reasoner.addFactsFromDataSource(sharingBordersPredicate, sharingBordersDataSource); - - // We compute the reflexive relation from "sharingBorders", and then the - // triangle relation - String rules = "reflexiveBorder(?X,?Y) :- sharingBorders(?X,?Y) .\n" - + "reflexiveBorder(?Y,?X) :- reflexiveBorder(?X,?Y) .\n" - + "triangle(?X,?Y,?Z) :- reflexiveBorder(?X,?Y), reflexiveBorder(?Y,?Z), reflexiveBorder(?Z,?X) . \n"; - + /* Configure rules */ RuleParser ruleParser = new RuleParser(); try { - ruleParser.parse(rules); + ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/counting-triangles.rls")); } catch (ParsingException e) { System.out.println("Failed to parse rules: " + e.getMessage()); return; } - + for (Pair pair : ruleParser.getDataSources()) { + reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight()); + } reasoner.addRules(ruleParser.getRules()); - - System.out.println("Rules configured:\n--"); + System.out.println("Rules used in this example:"); reasoner.getRules().forEach(System.out::println); - System.out.println("--"); + System.out.println(""); + /* Initialise reasoner and compute inferences */ + System.out.print("Initialising rules and data sources ... 
"); reasoner.load(); + System.out.println("completed."); - System.out.println("Loading completed."); - System.out.println("Starting reasoning ..."); + System.out.print("Reasoning (including SPARQL query answering) ... "); reasoner.reason(); - System.out.println("... reasoning completed.\n--"); + System.out.println("completed."); + /* Execute a query */ try { @@ -97,7 +84,7 @@ public static void main(final String[] args) throws IOException { QueryResultIterator answers = reasoner.answerQuery(query, true); // Note that we divide it by 6 System.out.println("The number of triangles in the sharesBorderWith relation (from Wikidata) is: " - + ": " + ExamplesUtils.iteratorSize(answers) / 6); + + ExamplesUtils.iteratorSize(answers) / 6); } catch (ParsingException e) { System.out.println("Failed to parse query: " + e.getMessage()); } From 6a76d1ddac333938b1fb9a3cc72782a7feb11d81 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 19 Aug 2019 19:35:09 +0200 Subject: [PATCH 0111/1003] cleanup --- .../vlog4j/parser/javacc/JavaCCParser.jj | 109 +++--------------- 1 file changed, 18 insertions(+), 91 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 6724e5aca..30ca77539 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -256,17 +256,13 @@ String predicateName() throws PrefixDeclarationException: | t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } } -// we use an int to specify where a variable comes from -// 0 if the variable does not come from a rule -// 1 if the variable comes from the head of a rule -// 2 if the variable comes from the body of a rule Term term(FormulaContext context) throws PrefixDeclarationException: { Token t; String s; Constant c; } -{ +{ //TODO move Expressions.makeConstant to JavaCCParserBase s = IRI(false) { return Expressions.makeConstant(s); } | c = NumericLiteral() { return c; } | s = RDFLiteral() { return Expressions.makeConstant(s); } @@ -277,7 +273,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException: headUniVars.add(s); else if (context == FormulaContext.BODY) bodyVars.add(s); - return Expressions.makeVariable(t.image.substring(1)); + return Expressions.makeVariable(s); } | t = < EXIVAR > { @@ -286,7 +282,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException: headExiVars.add(s); if (context == FormulaContext.BODY) throw new ParseException("Existentialy quantified variables can not appear in the body. 
Line: " + t.beginLine + ", Column: "+ t.beginColumn); - return Expressions.makeVariable(t.image.substring(1)); + return Expressions.makeVariable(s); } | t = < VARORPREDNAME > { return Expressions.makeConstant(prefixDeclarations.absolutize(t.image));} } @@ -356,12 +352,13 @@ String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: iri = IRIREF() | iri = PrefixedName() ) - { String result = prefixDeclarations.absolutize(iri); - if (includeAngleBrackets) - result = "<"+result+">"; - return result; - //return "<"+prefixDeclarations.absolutize(iri)+">"; - } + { + String result = prefixDeclarations.absolutize(iri); + if (includeAngleBrackets) { + result = "<"+result+">"; + } + return result; + } } String PrefixedName() throws PrefixDeclarationException: @@ -390,8 +387,7 @@ String IRIREF() : } // ------------------------------------------ -// Tokens -// Comments and whitespace +// Whitespace SKIP : { " " @@ -401,29 +397,10 @@ SKIP : | "\f" } -TOKEN : -{ - < #WS : - " " - | "\t" - | "\n" - | "\r" - | "\f" > -} - -SPECIAL_TOKEN : -{ - < SINGLE_LINE_COMMENT : - "#" (~[ "\n", "\r" ])* - ( - "\n" - | "\r" - | "\r\n" - )? > -} +//Comments +SKIP :{< "%" (~["\n"])* "\n" >} -// ------------------------------------------------- -// Keywords : directives before LANGTAG +// ------------------------------------------ TOKEN : { < PREFIX : "@prefix" > @@ -444,7 +421,6 @@ TOKEN [ IGNORE_CASE ] : | "." (< DIGITS >)+ ) > - // Required exponent. | < DOUBLE : ([ "+", "-" ])? ( @@ -456,8 +432,6 @@ TOKEN [ IGNORE_CASE ] : | < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? ([ "0"-"9" ])+ > | < #QUOTE_3D : "\"\"\"" > | < #QUOTE_3S : "'''" > - // "u" done by javacc input stream. - // "U" escapes not supported yet for Java strings | < ECHAR : "\\" ( @@ -505,7 +479,6 @@ TOKEN [ IGNORE_CASE ] : )* < QUOTE_3D > > | < DIGITS : ([ "0"-"9" ])+ > - // | } TOKEN : @@ -514,9 +487,8 @@ TOKEN : < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > | < PNAME_LN : (< PN_PREFIX >)? ":" < PN_LOCAL > > | < PNAME_NS : < PN_PREFIX > ":" > -| < BLANK_NODE_LABEL : "_:" < PN_LOCAL > > -| < UNIVAR : "?" < VARORPREDNAME > > -| < EXIVAR : "!" < VARORPREDNAME > > +| < UNIVAR : < QMARK > < VARORPREDNAME > > +| < EXIVAR : < EMARK > < VARORPREDNAME > > | < LANGTAG : < AT > (< A2Z >)+ ( @@ -530,46 +502,13 @@ TOKEN : { < LPAREN : "(" > | < RPAREN : ")" > -| < NIL : - < LPAREN > - ( - < WS > - | < SINGLE_LINE_COMMENT > - )* - < RPAREN > > -| < LBRACE : "{" > -| < RBRACE : "}" > -| < LBRACKET : "[" > -| < RBRACKET : "]" > -| < ANON : - < LBRACKET > - ( - < WS > - | < SINGLE_LINE_COMMENT > - )* - < RBRACKET > > -| < SEMICOLON : ";" > | < COMMA : "," > | < DOT : "." > -} - -// Operator -TOKEN : -{ - < EQ : "=" > | < ARROW : ":-" > -| < DOLLAR : "$" > | < QMARK : "?" > +| < EMARK : "!" > | < TILDE : "~" > | < COLON : ":" > - // | < PLUS: "+" > - // | < MINUS: "-" > -| < STAR : "*" > -| < SLASH : "/" > -| < RSLASH : "\\" > -| < BOM : "\ufeff" > - //| < AMP: "&" > - //| < REM: "%" > | < DATATYPE : "^^" > | < AT : "@" > } @@ -636,9 +575,7 @@ TOKEN : )* < PN_CHARS > )? > -| - // NCNAME without "-" and ".", allowing leading digits. - < VARORPREDNAME : +| < VARORPREDNAME : ( < PN_CHARS_U > | [ "0"-"9" ] @@ -652,13 +589,3 @@ TOKEN : )* > } -// Catch-all tokens. Must be last. -// Any non-whitespace. Causes a parser exception, rather than a -// token manager error (with hidden line numbers). -// Only bad IRIs (e.g. spaces) now give unhelpful parse errors. 
-TOKEN : -{ - < #UNKNOWN : (~[ " ", "\t", "\n", "\r", "\f" ])+ > -} - -SKIP :{< "%" (~["\n"])* "\n" >} From 90cda308e1f48c2cc086499a6343fc8904b572c2 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 19 Aug 2019 20:28:34 +0200 Subject: [PATCH 0112/1003] cleanup --- .../vlog4j/parser/javacc/JavaCCParser.jj | 88 ++++++++----------- 1 file changed, 35 insertions(+), 53 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 30ca77539..a81f9a822 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -411,7 +411,7 @@ TOKEN : | < SPARQL : "sparql"> } -TOKEN [ IGNORE_CASE ] : +TOKEN: { < INTEGER : ([ "-", "+" ])? < DIGITS > > | < DECIMAL : @@ -429,22 +429,13 @@ TOKEN [ IGNORE_CASE ] : | ([ "0"-"9" ])+ < EXPONENT > ) > +| < #DIGITS : ([ "0"-"9" ])+ > | < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? ([ "0"-"9" ])+ > -| < #QUOTE_3D : "\"\"\"" > -| < #QUOTE_3S : "'''" > -| < ECHAR : - "\\" - ( - "t" - | "b" - | "n" - | "r" - | "f" - | "\\" - | "\"" - | "'" - ) > -| < STRING_LITERAL1 : +} + +TOKEN: +{ + < STRING_LITERAL1 : // Single quoted string "'" ( @@ -461,24 +452,35 @@ TOKEN [ IGNORE_CASE ] : )* "\"" > | < STRING_LITERAL_LONG1 : - < QUOTE_3S > + "'''" ( ~[ "'", "\\" ] | < ECHAR > | ("'" ~[ "'" ]) | ("''" ~[ "'" ]) )* - < QUOTE_3S > > + "'''" > | < STRING_LITERAL_LONG2 : - < QUOTE_3D > + "\"\"\"" ( ~[ "\"", "\\" ] | < ECHAR > | ("\"" ~[ "\"" ]) | ("\"\"" ~[ "\"" ]) )* - < QUOTE_3D > > -| < DIGITS : ([ "0"-"9" ])+ > + "\"\"\"" > +| < #ECHAR : + "\\" + ( + "t" + | "b" + | "n" + | "r" + | "f" + | "\\" + | "\"" + | "'" + ) > } TOKEN : @@ -494,6 +496,7 @@ TOKEN : ( "-" (< A2ZN >)+ )* > +| < VARORPREDNAME : < A2Z> (< A2ZN >)* > | < #A2Z : [ "a"-"z", "A"-"Z" ] > | < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > } @@ -518,41 +521,32 @@ TOKEN : < #PN_CHARS_BASE : [ "A"-"Z" ] | [ "a"-"z" ] - | - [ "\u00c0"-"\u00d6" ] + | [ "\u00c0"-"\u00d6" ] | [ "\u00d8"-"\u00f6" ] | [ "\u00f8"-"\u02ff" ] - | - [ "\u0370"-"\u037d" ] + | [ "\u0370"-"\u037d" ] | [ "\u037f"-"\u1fff" ] - | - [ "\u200c"-"\u200d" ] + | [ "\u200c"-"\u200d" ] | [ "\u2070"-"\u218f" ] | [ "\u2c00"-"\u2fef" ] - | - [ "\u3001"-"\ud7ff" ] + | [ "\u3001"-"\ud7ff" ] | [ "\uf900"-"\ufffd" ] > - // [#x10000-#xEFFFF] + // | [ ""#x10000-#xEFFFF] | < #PN_CHARS_U : < PN_CHARS_BASE > | "_" > -| - // No DOT - < #PN_CHARS : +| < #PN_CHARS : ( < PN_CHARS_U > | "-" | [ "0"-"9" ] | "\u00b7" - | - [ "\u0300"-"\u036f" ] + | [ "\u0300"-"\u036f" ] | [ "\u203f"-"\u2040" ] ) > -| - // No leading "_", no trailing ".", can have dot inside prefix name. - < #PN_PREFIX : +| < #PN_PREFIX : < PN_CHARS_BASE > ( ( @@ -561,31 +555,19 @@ TOKEN : )* < PN_CHARS > )? > -| - // With a leading "_", no dot at end of local name. - < #PN_LOCAL : +| < #PN_LOCAL : ( < PN_CHARS_U > + | ":" | [ "0"-"9" ] ) ( ( < PN_CHARS > | "." + | ":" )* < PN_CHARS > )? 
> -| < VARORPREDNAME : - ( - < PN_CHARS_U > - | [ "0"-"9" ] - ) - ( - < PN_CHARS_U > - | [ "0"-"9" ] - | "\u00b7" - | [ "\u0300"-"\u036f" ] - | [ "\u203f"-"\u2040" ] - )* > } From 71788624300f07cb50cdca752c0e098b8ed1e6df Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 19 Aug 2019 21:52:27 +0200 Subject: [PATCH 0113/1003] Improved triangle example --- .../main/data/input/counting-triangles.rls | 19 +++++++----- .../vlog4j/examples/CountingTriangles.java | 30 ++++++++++--------- 2 files changed, 27 insertions(+), 22 deletions(-) diff --git a/vlog4j-examples/src/main/data/input/counting-triangles.rls b/vlog4j-examples/src/main/data/input/counting-triangles.rls index 338255aa0..223c4a77a 100644 --- a/vlog4j-examples/src/main/data/input/counting-triangles.rls +++ b/vlog4j-examples/src/main/data/input/counting-triangles.rls @@ -1,15 +1,18 @@ @prefix wdqs: . -@source shareBorderWith(2): sparql(wdqs:sparql, "country1,country2", - '''?country1 wdt:P31 wd:Q6256 . - ?country2 wdt:P31 wd:Q6256 . +% From Wikidata, get all countries (items with P31 relation to Q6256, or subclasses thereof: P279*) +% that border (P47) each other: +@source borders(2): sparql(wdqs:sparql, "country1,country2", + '''?country1 wdt:P31/wdt:P279* wd:Q6256 . + ?country2 wdt:P31/wdt:P279* wd:Q6256 . ?country1 wdt:P47 ?country2 .''') . +% Compute the symmetric closure of borders: +shareBorder(?X, ?Y) :- borders(?X, ?Y) . +shareBorder(?X, ?Y) :- borders(?Y, ?X) . -% compute the reflexive relation of SharingBorders -reflexiveShareBorderWith(?X,?Y) :- shareBorderWith(?X,?Y) . -reflexiveShareBorderWith(?Y,?X) :- reflexiveShareBorderWith(?X,?Y) . +country(?X) :- shareBorder(?X, ?Y) . -% compute the triangles -triangle(?X,?Y,?Z) :- reflexiveShareBorderWith(?X,?Y), reflexiveShareBorderWith(?Y,?Z), reflexiveShareBorderWith(?Z,?X) . +% Compute all triangles: +triangle(?X,?Y,?Z) :- shareBorder(?X,?Y), shareBorder(?Y,?Z), shareBorder(?Z,?X) . 
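For readers tracing what the `sparql(...)` directive above does: the parser resolves it to a `SparqlQueryResultDataSource`, just as the pre-`@source` version of CountingTriangles constructed one by hand. Below is a sketch of that equivalent construction; the class and method names are made up for illustration, and it assumes that `wdqs:sparql` expands to `https://query.wikidata.org/sparql`, the endpoint used elsewhere in these patches.

```
import java.net.MalformedURLException;
import java.net.URL;

import org.semanticweb.vlog4j.core.reasoner.DataSource;
import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource;

public class BordersSourceSketch {
	// Hand-built equivalent of the @source declaration in counting-triangles.rls:
	public static DataSource bordersDataSource() throws MalformedURLException {
		final String queryBody = "?country1 wdt:P31/wdt:P279* wd:Q6256 . "
				+ "?country2 wdt:P31/wdt:P279* wd:Q6256 . "
				+ "?country1 wdt:P47 ?country2 .";
		return new SparqlQueryResultDataSource(new URL("https://query.wikidata.org/sparql"),
				"country1,country2", queryBody);
	}
}
```

On the counting side: because shareBorder is made symmetric, each unordered pair of bordering countries yields two shareBorder facts and each unordered triangle of countries yields 3! = 6 triangle facts, which is why the accompanying Java code divides the respective answer counts by 2 and by 6.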
diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 662b33c17..454307e5b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -29,9 +29,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.reasoner.DataSource; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -49,13 +47,10 @@ public static void main(final String[] args) throws IOException { ExamplesUtils.configureLogging(); try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); - reasoner.setLogLevel(LogLevel.DEBUG); - /* Configure rules */ RuleParser ruleParser = new RuleParser(); try { - ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/counting-triangles.rls")); + ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "counting-triangles.rls")); } catch (ParsingException e) { System.out.println("Failed to parse rules: " + e.getMessage()); return; @@ -77,21 +72,28 @@ public static void main(final String[] args) throws IOException { reasoner.reason(); System.out.println("completed."); - - /* Execute a query */ + /* Execute queries */ try { - PositiveLiteral query = ruleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)"); - QueryResultIterator answers = reasoner.answerQuery(query, true); - // Note that we divide it by 6 - System.out.println("The number of triangles in the sharesBorderWith relation (from Wikidata) is: " - + ExamplesUtils.iteratorSize(answers) / 6); + PositiveLiteral query; + + query = ruleParser.parsePositiveLiteral("country(?X)"); + System.out.print("Found " + ExamplesUtils.iteratorSize(reasoner.answerQuery(query, true)) + + " countries in Wikidata"); + // Due to symmetry, each joint border is found twice, hence we divide by 2: + query = ruleParser.parsePositiveLiteral("shareBorder(?X,?Y)"); + System.out.println(", with " + ExamplesUtils.iteratorSize(reasoner.answerQuery(query, true)) / 2 + + " pairs of them sharing a border."); + // Due to symmetry, each triangle is found six times, hence we divide by 6: + query = ruleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)"); + System.out.println("The number of triangles of countries that mutually border each other was " + + ExamplesUtils.iteratorSize(reasoner.answerQuery(query, true)) / 6 + "."); } catch (ParsingException e) { System.out.println("Failed to parse query: " + e.getMessage()); } System.out.println("Done."); } catch (VLog4jException e) { - System.out.println("The reasoner encountered a problem:" + e.getMessage()); + System.out.println("The reasoner encountered a problem: " + e.getMessage()); } } From 615f24708daceaa3f4634257725b9c40f5e51c63 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 09:13:51 +0200 Subject: [PATCH 0114/1003] simplified access restrictions --- .../parser/javacc/JavaCCParserBase.java | 41 +++++++++---------- 1 file changed, 20 insertions(+), 21 deletions(-) diff --git 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 03000e018..c73cc2cd7 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -49,24 +49,24 @@ * */ public class JavaCCParserBase { - final protected PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); + final PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); - final protected List rules = new ArrayList<>(); - final protected List facts = new ArrayList<>(); - final protected List> dataSources = new ArrayList<>(); + final List rules = new ArrayList<>(); + final List facts = new ArrayList<>(); + final List> dataSources = new ArrayList<>(); /** * "Local" variable to remember (universal) body variables during parsing. */ - final protected HashSet bodyVars = new HashSet(); + final HashSet bodyVars = new HashSet(); /** * "Local" variable to remember existential head variables during parsing. */ - final protected HashSet headExiVars = new HashSet();; + final HashSet headExiVars = new HashSet();; /** * "Local" variable to remember universal head variables during parsing. */ - final protected HashSet headUniVars = new HashSet();; + final HashSet headUniVars = new HashSet();; /** * Defines the context for parsing sub-formulas. @@ -87,29 +87,28 @@ public enum FormulaContext { BODY } - protected Constant createIntegerLiteral(String lexicalForm) { + Constant createIntegerLiteral(String lexicalForm) { return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_INTEGER + ">"); } - protected Constant createDecimalLiteral(String lexicalForm) { + Constant createDecimalLiteral(String lexicalForm) { return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_DECIMAL + ">"); } - protected Constant createDoubleLiteral(String lexicalForm) { + Constant createDoubleLiteral(String lexicalForm) { return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_DOUBLE + ">"); } - + void addDataSource(String predicateName, int arity, DataSource dataSource) { Predicate predicate = Expressions.makePredicate(predicateName, arity); - dataSources.add(Pair.of(predicate,dataSource)); + dataSources.add(Pair.of(predicate, dataSource)); } - protected static String unescapeStr(String s, int line, int column) throws ParseException { + static String unescapeStr(String s, int line, int column) throws ParseException { return unescape(s, '\\', false, line, column); } - protected static String unescape(String s, char escape, boolean pointCodeOnly, int line, int column) - throws ParseException { + static String unescape(String s, char escape, boolean pointCodeOnly, int line, int column) throws ParseException { int i = s.indexOf(escape); if (i == -1) @@ -180,21 +179,21 @@ protected static String unescape(String s, char escape, boolean pointCodeOnly, i } /** Remove first and last characters (e.g. ' or "") from a string */ - protected static String stripQuotes(String s) { + static String stripQuotes(String s) { return s.substring(1, s.length() - 1); } /** Remove first 3 and last 3 characters (e.g. 
''' or """) from a string */ - protected static String stripQuotes3(String s) { + static String stripQuotes3(String s) { return s.substring(3, s.length() - 3); } /** remove the first n charcacters from the string */ - protected static String stripChars(String s, int n) { + static String stripChars(String s, int n) { return s.substring(n, s.length()); } - protected String strRDFLiteral(String data, String lang, String dt) { + String strRDFLiteral(String data, String lang, String dt) { // https://www.w3.org/TR/turtle/#grammar-production-String RDFLiteral String ret = "\"" + data + "\""; if (dt != null) { @@ -212,7 +211,7 @@ protected String strRDFLiteral(String data, String lang, String dt) { /** * Reset the local set variables used when parsing a rule. */ - protected void resetVariableSets() { + void resetVariableSets() { this.bodyVars.clear(); this.headExiVars.clear(); this.headUniVars.clear(); @@ -225,7 +224,7 @@ public List getRules() { public List getFacts() { return facts; } - + public List> getDataSources() { return dataSources; } From 5efb2caddde075f6b40bd1344ae840a6c25217df Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 09:15:10 +0200 Subject: [PATCH 0115/1003] another test --- .../semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index bc8c0b9db..af93ccbc6 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -427,5 +427,12 @@ public void testSparqlSource() throws ParsingException, MalformedURLException { assertEquals(p, ruleParser.getDataSources().get(0).getLeft()); assertEquals(sparqlds, ruleParser.getDataSources().get(0).getRight()); } + + @Test(expected = ParsingException.class) + public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { + String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } } From 7558fccdb8dfb12f96535fc1c699c63c9efd6b92 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 09:40:39 +0200 Subject: [PATCH 0116/1003] Expand rule to match comment --- .../semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index bee2c4ef3..3a5c9113c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -74,7 +74,7 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I // every bicycle has some part that is a wheel: + "hasPartIDB(?X, !Y), wheelIDB(!Y) :- bicycleIDB(?X) ." // every wheel is part of some bicycle: - + "isPartOfIDB(?X, !Y) :- wheelIDB(?X) ." + + "isPartOfIDB(?X, !Y), bicycleIDB(!Y) :- wheelIDB(?X) ." // hasPart and isPartOf are mutually inverse relations: + "hasPartIDB(?X, ?Y) :- isPartOfIDB(?Y, ?X) ." 
// + "isPartOfIDB(?X, ?Y) :- hasPartIDB(?Y, ?X) ."; From ee1a58f822b4caee3f86b15cedbeb702034febe9 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 09:40:51 +0200 Subject: [PATCH 0117/1003] Rewrote example to use new features --- .../SkolemVsRestrictedChaseTermination.java | 150 ++++++------------ 1 file changed, 49 insertions(+), 101 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 6377ac2c8..fcd07a959 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -25,117 +25,67 @@ import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.examples.ExamplesUtils; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; /** * This example shows non-termination of the Skolem Chase, versus termination of * the Restricted Chase on the same set of rules and facts. Note that the * Restricted Chase is the default reasoning algorithm, as it terminates in most * cases and generates a smaller number of facts. - * - * @TODO Convert to use string-based rules and parse them, instead of building - * rules tediously in Java. * * @author Irina Dragoste * */ public class SkolemVsRestrictedChaseTermination { - public static void main(final String[] args) - throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + public static void main(final String[] args) throws ReasonerStateException, EdbIdbSeparationException, + IncompatiblePredicateArityException, IOException, ParsingException { ExamplesUtils.configureLogging(); - /* 1. 
Instantiating entities, rules and facts */ - final Predicate bicycleIDB = Expressions.makePredicate("BicycleIDB", 1); - final Predicate bicycleEDB = Expressions.makePredicate("BicycleEDB", 1); - final Predicate wheelIDB = Expressions.makePredicate("WheelIDB", 1); - final Predicate wheelEDB = Expressions.makePredicate("WheelEDB", 1); - final Predicate hasPartIDB = Expressions.makePredicate("HasPartIDB", 2); - final Predicate hasPartEDB = Expressions.makePredicate("HasPartEDB", 2); - final Predicate isPartOfIDB = Expressions.makePredicate("IsPartOfIDB", 2); - final Predicate isPartOfEDB = Expressions.makePredicate("IsPartOfEDB", 2); - final Constant bicycle1 = Expressions.makeConstant("bicycle1"); - final Constant bicycle2 = Expressions.makeConstant("bicycle2"); - final Constant wheel1 = Expressions.makeConstant("wheel1"); - final Variable x = Expressions.makeVariable("x"); - final Variable y = Expressions.makeVariable("y"); - - /* BicycleIDB(?x) :- BicycleEDB(?x) . */ - final PositiveLiteral bicycleIDBX = Expressions.makePositiveLiteral(bicycleIDB, x); - final PositiveLiteral bicycleEDBX = Expressions.makePositiveLiteral(bicycleEDB, x); - final Rule rule1 = Expressions.makeRule(bicycleIDBX, bicycleEDBX); - - /* WheelIDB(?x) :- WheelEDB(?x) . */ - final PositiveLiteral wheelIDBX = Expressions.makePositiveLiteral(wheelIDB, x); - final PositiveLiteral wheelEDBX = Expressions.makePositiveLiteral(wheelEDB, x); - final Rule rule2 = Expressions.makeRule(wheelIDBX, wheelEDBX); - - /* hasPartIDB(?x, ?y) :- hasPartEDB(?x, ?y) . */ - final PositiveLiteral hasPartIDBXY = Expressions.makePositiveLiteral(hasPartIDB, x, y); - final PositiveLiteral hasPartEDBXY = Expressions.makePositiveLiteral(hasPartEDB, x, y); - final Rule rule3 = Expressions.makeRule(hasPartIDBXY, hasPartEDBXY); - - /* isPartOfIDB(?x, ?y) :- isPartOfEDB(?x, ?y) . */ - final PositiveLiteral isPartOfIDBXY = Expressions.makePositiveLiteral(isPartOfIDB, x, y); - final PositiveLiteral isPartOfEDBXY = Expressions.makePositiveLiteral(isPartOfEDB, x, y); - final Rule rule4 = Expressions.makeRule(isPartOfIDBXY, isPartOfEDBXY); - - /* - * exists y. HasPartIDB(?x, !y), WheelIDB(!y) :- BicycleIDB(?x) . - */ - final PositiveLiteral wheelIDBY = Expressions.makePositiveLiteral(wheelIDB, y); - final Rule rule5 = Expressions.makeRule(Expressions.makePositiveConjunction(hasPartIDBXY, wheelIDBY), - Expressions.makeConjunction(bicycleIDBX)); - - /* - * exists y. IsPartOfIDB(?x, !y), BicycleIDB(!y) :- WheelIDB(?x) . - */ - final PositiveLiteral bycicleIDBY = Expressions.makePositiveLiteral(bicycleIDB, y); - final Rule rule6 = Expressions.makeRule(Expressions.makePositiveConjunction(isPartOfIDBXY, bycicleIDBY), - Expressions.makeConjunction(wheelIDBX)); - - /* IsPartOfIDB(?x, ?y) :- HasPartIDB(?y, ?x) . */ - final PositiveLiteral hasPartIDBYX = Expressions.makePositiveLiteral(hasPartIDB, y, x); - final Rule rule7 = Expressions.makeRule(isPartOfIDBXY, hasPartIDBYX); - - /* HasPartIDB(?x, ?y) :- IsPartOfIDB(?y, ?x) . */ - final PositiveLiteral isPartOfIDBYX = Expressions.makePositiveLiteral(isPartOfIDB, y, x); - final Rule rule8 = Expressions.makeRule(hasPartIDBXY, isPartOfIDBYX); - - /* BicycleEDB(bicycle1) . */ - final PositiveLiteral fact1 = Expressions.makePositiveLiteral(bicycleEDB, bicycle1); - - /* HasPartEDB(bicycle1, wheel1) . */ - final PositiveLiteral fact2 = Expressions.makePositiveLiteral(hasPartEDB, bicycle1, wheel1); - - /* Wheel(wheel1) . 
*/ - final PositiveLiteral fact3 = Expressions.makePositiveLiteral(wheelEDB, wheel1); - - /* BicycleEDB(b) . */ - final PositiveLiteral fact4 = Expressions.makePositiveLiteral(bicycleEDB, bicycle2); + /* 1. Load data and prepare rules. */ + + final String rules = "" // define some facts: + + "bicycleEDB(bicycle1) ." // + + "hasPartEDB(bicycle1, wheel1) ." // + + "wheelEDB(wheel1) ." // + + "bicycleEDB(bicycle2) ." // + // rules to load all data from the file-based ("EDB") predicates: + + "bicycleIDB(?X) :- bicycleEDB(?X) ." // + + "wheelIDB(?X) :- wheelEDB(?X) ." // + + "hasPartIDB(?X, ?Y) :- hasPartEDB(?X, ?Y) ." // + + "isPartOfIDB(?X, ?Y) :- isPartOfEDB(?X, ?Y) ." // + // every bicycle has some part that is a wheel: + + "hasPartIDB(?X, !Y), wheelIDB(!Y) :- bicycleIDB(?X) ." // + // every wheel is part of some bicycle: + + "isPartOfIDB(?X, !Y), bicycleIDB(!Y) :- wheelIDB(?X) ." // + // hasPart and isPartOf are mutually inverse relations: + + "hasPartIDB(?X, ?Y) :- isPartOfIDB(?Y, ?X) ." // + + "isPartOfIDB(?X, ?Y) :- hasPartIDB(?Y, ?X) ."; + + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(rules); /* * 2. Loading, reasoning, and querying. Use try-with resources, or remember to * call close() to free the reasoner resources. */ try (Reasoner reasoner = Reasoner.getInstance()) { - - reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); - reasoner.addFacts(fact1, fact2, fact3, fact4); + reasoner.addRules(ruleParser.getRules()); + reasoner.addFacts(ruleParser.getFacts()); reasoner.load(); + PositiveLiteral queryHasPart = ruleParser.parsePositiveLiteral("hasPartIDB(?X, ?Y)"); + /* See that there is no fact HasPartIDB before reasoning. */ - System.out.println("Answers to query " + hasPartIDBXY + " before reasoning:"); - ExamplesUtils.printOutQueryAnswers(hasPartIDBXY, reasoner); + System.out.println("Before reasoning is started, no inferrences have been computed yet."); + ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); /* * As the Skolem Chase is known not to terminate for this set of rules and @@ -143,27 +93,26 @@ public static void main(final String[] args) */ reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); reasoner.setReasoningTimeout(1); - System.out.println("Starting Skolem Chase with 1 second timeout."); - - /* Indeed, the Skolem Chase did not terminate before timeout. */ + System.out.print("Starting Skolem Chase (a.k.a. semi-oblivious chase) with 1 second timeout ... "); final boolean skolemChaseFinished = reasoner.reason(); - System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? " + skolemChaseFinished); + System.out.println("done."); + /* Verify that the Skolem Chase did not terminate before timeout. */ + System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? " + skolemChaseFinished); /* * See that the Skolem Chase generated a very large number of facts in 1 second, * extensively introducing new unnamed individuals to satisfy existential * restrictions. 
*/ - System.out.println( - "Answers to query " + hasPartIDBXY + " after reasoning with the Skolem Chase for 1 second:"); - ExamplesUtils.printOutQueryAnswers(hasPartIDBXY, reasoner); + QueryResultIterator answers = reasoner.answerQuery(queryHasPart, true); + System.out.println("Before the timeout, the Skolem chase had produced " + + ExamplesUtils.iteratorSize(answers) + " results for hasPartIDB(?X, ?Y)."); /* * We reset the reasoner and apply the Restricted Chase on the same set of rules * and facts */ - System.out.println(); - System.out.println("Reseting reasoner; discarding facts generated during reasoning."); + System.out.println("\nReseting reasoner; discarding facts generated during reasoning."); reasoner.resetReasoner(); reasoner.load(); @@ -171,8 +120,8 @@ public static void main(final String[] args) * See that there is no fact HasPartIDB before reasoning. All inferred facts * have been discarded when the reasoner was reset. */ - System.out.println("Answers to query " + hasPartIDBXY + " before reasoning:"); - ExamplesUtils.printOutQueryAnswers(hasPartIDBXY, reasoner); + System.out.println("We can verify that there are no inferences for hasPartIDB(?X, ?Y) after reset."); + ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); /* * As the Restricted Chase is known to terminate for this set of rules and @@ -181,20 +130,19 @@ public static void main(final String[] args) reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); reasoner.setReasoningTimeout(null); final long restrictedChaseStartTime = System.currentTimeMillis(); - System.out.println("Starting Restricted Chase with no timeout."); + System.out.print("Starting Restricted Chase (a.k.a. Standard Chase) without any timeout ... "); + reasoner.reason(); + System.out.println("done."); - /* Indeed, the Restricted Chase did terminate (in less than 1 second) */ - final boolean restrictedChaseFinished = reasoner.reason(); + /* The Restricted Chase terminates: */ final long restrictedChaseDuration = System.currentTimeMillis() - restrictedChaseStartTime; - System.out.println("Has Restricted Chase algorithm finished? " + restrictedChaseFinished + ". (Duration: " - + restrictedChaseDuration + " ms)"); + System.out.println("The Restricted Chase finished in " + restrictedChaseDuration + " ms."); /* * See that the Restricted Chase generated a small number of facts, reusing * individuals that satisfy existential restrictions. */ - System.out.println("Answers to query " + hasPartIDBXY + " after reasoning with the Restricted Chase:"); - ExamplesUtils.printOutQueryAnswers(hasPartIDBXY, reasoner); + ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); } } From 0a71c8241fe421c88277a6e054fa25dbebb08844 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 09:56:22 +0200 Subject: [PATCH 0118/1003] add RELEASE-NOTES to repo --- RELEASE-NOTES.md | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 RELEASE-NOTES.md diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md new file mode 100644 index 000000000..e57d4e30d --- /dev/null +++ b/RELEASE-NOTES.md @@ -0,0 +1,42 @@ +VLog4j Release Notes +==================== + +VLog4j v0.4.0 +------------- + +Breaking changes: +* IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier + +New features: +* New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java + +Bugfixes: +* ... 
+ + +VLog4j v0.3.0 +------------- + +New features: +* Support for Graal data structures (conversion from Graal model to VLog model objects) +* Stratified negation: rule bodies are conjunctions of positive or negated literals +* SPARQL-based data sources: load remote data from SPARQL endpoints +* Acyclicity and cyclicity checks: JA, RJA, MFA, RMFA, RFC, as well as a generic method that checks whether given set or rules and fact predicates are acyclic, cyclic, or undetermined + +VLog4j v0.2.0 +------------- + +New features: +* supporting File data sources of N-Triples format (.nt file extension) +* supporting g-zipped data source files (.csv.gz, .nt.gz) + +VLog4j v0.1.0 +------------- + +Initial release. + +New features: +* Essential data models for rules and facts, and essential reasoner functionality +* support for reading from RDF files +* support for converting rules from OWL ontology, loaded with the OWL API + From 27bfde4a6518bf87089cd747a56ce7578a7ecdea Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 09:59:14 +0200 Subject: [PATCH 0119/1003] mentioned new examples --- RELEASE-NOTES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index e57d4e30d..51eef5e28 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -9,6 +9,7 @@ Breaking changes: New features: * New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java +* New and updated example programs to illustrate use of syntax Bugfixes: * ... From 079b66822f9ba2ba81a035166200d27922cf6a42 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 10:29:43 +0200 Subject: [PATCH 0120/1003] update coveralls configuration --- .travis.yml | 2 +- pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index e531b674c..4abfe58ab 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,7 +21,7 @@ before_install: install: mvn install $OPTIONS -DskipTests=true after_success: - - mvn clean cobertura:cobertura coveralls:cobertura + - mvn clean cobertura:cobertura coveralls:report dist: trusty sudo: false diff --git a/pom.xml b/pom.xml index 13d612650..d954f898b 100644 --- a/pom.xml +++ b/pom.xml @@ -219,7 +219,7 @@ org.eluder.coveralls coveralls-maven-plugin - 2.1.0 + 4.3.0 From eb11c9a9ed3016c2e6a8f263d4aa199176b37a0f Mon Sep 17 00:00:00 2001 From: Ceriel Jacobs Date: Tue, 20 Aug 2019 10:33:17 +0200 Subject: [PATCH 0121/1003] fixes in example --- .../examples/SimpleReasoningExample.java | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 0fe631915..6b136629c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -44,22 +44,24 @@ public static void main(final String[] args) throws IOException { // Define some facts and rules in VLog's basic syntax: String rules = "% --- Some facts --- \n" // + "location(germany,europe). " // -// + "location(uk,europe). " // + + "location(uk,europe). " // + "location(saxony,germany). " // -// + "location(dresden,saxony). " // -// + "city(dresden). " // -// + "country(germany). country(uk). " // - + "university(tudresden, germany) . university(uoxford, uk) . 
" // -// + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") ." // + + "location(dresden,saxony). " // + + "city(dresden). " // + + "country(germany). " // + + "country(uk). " // + + "university(tudresden, germany). " // + + "university(uoxford, uk) . " // + + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") ." // + "zipLocation(\"01069\", dresden) ." // -// + "% --- Standard recursion: locations are transitive --- \n" // + + "% --- Standard recursion: locations are transitive --- \n" // + "locatedIn(?X,?Y) :- location(?X,?Y) . " // -// + "locatedIn(?X,?Z) :- location(?X,?Y), locatedIn(?Y,?Z) . " // -// + "% --- Build address facts using the city constant --- \n" // -// + "address(?Uni, ?Street, ?ZIP, ?City) :- address(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP,?City) ." -// + "% --- Value invention: universities have some address --- \n" // + + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . " // + + "% --- Build address facts using the city constant --- \n" // + + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP,?City) ." + + "% --- Value invention: universities have some address --- \n" // + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) ." -// + "% --- Negation: organisations in Europe but not in Germany --- \n" // + + "% --- Negation: organisations in Europe but not in Germany --- \n" // + "inEuropeOutsideGermany(?Org) :- address(?Org, ?S, ?Z, ?City), locatedIn(?City, europe), ~locatedIn(?City, germany) ." + ""; From 6a416fcb1841e78a6ae7dead9fdb1bcf9dfe5b85 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 10:35:49 +0200 Subject: [PATCH 0122/1003] Add note on javacc in Eclipse --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.md b/README.md index cc33606ce..c1aa585b8 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ The current release of VLog4j is version 0.3.0. The easiest way of using the lib You need to use Java 1.8 or above. Available modules include: * **vlog4j-core**: essential data models for rules and facts, and essential reasoner functionality +* **vlog4j-parser**: support for processing knowledge bases in VLog4j syntax * **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files * **vlog4j-rdf**: support for reading from RDF files * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API @@ -36,3 +37,9 @@ Documentation * The module **vlog4j-examples** includes short example programs that demonstrate some common use cases * [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. + +Development +----------- + +* Pull requests are welcome. +* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. 
\ No newline at end of file From d2eac8d14ec5316679c12e3935d3a4f5542f2e0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Tue, 20 Aug 2019 10:47:26 +0200 Subject: [PATCH 0123/1003] mention vlog master dependency in development --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c1aa585b8..44317791f 100644 --- a/README.md +++ b/README.md @@ -42,4 +42,6 @@ Development ----------- * Pull requests are welcome. -* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. \ No newline at end of file +* The master branch may require a development version of VLog. +Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). +* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. From f5e7c35ed71717b7c0903df1a37a37433313ee34 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 11:02:22 +0200 Subject: [PATCH 0124/1003] Further improved example --- .../examples/SimpleReasoningExample.java | 50 +++++++++---------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 212e7688b..f2cca5444 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -43,27 +43,29 @@ public static void main(final String[] args) throws IOException { // Define some facts and rules in VLog's basic syntax: String rules = "% --- Some facts --- \n" // - + "location(germany,europe). " // - + "location(uk,europe). " // - + "location(saxony,germany). " // - + "location(dresden,saxony). " // - + "city(dresden). " // - + "country(germany). " // - + "country(uk). " // - + "university(tudresden, germany). " // - + "university(uoxford, uk) . " // - + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") ." // - + "zipLocation(\"01069\", dresden) ." // + + "location(germany,europe). \n" // + + "location(uk,europe). \n" // + + "location(saxony,germany). \n" // + + "location(dresden,saxony). \n" // + + "city(dresden). \n" // + + "country(germany). \n" // + + "country(uk). \n" // + + "university(tudresden, germany). \n" // + + "university(uoxford, uk) . \n" // + + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // + + "zipLocation(\"01069\", dresden) . \n" // + "% --- Standard recursion: locations are transitive --- \n" // - + "locatedIn(?X,?Y) :- location(?X,?Y) . " // - + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . " // + + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // + + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . \n" // + "% --- Build address facts using the city constant --- \n" // - + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP,?City) ." 
+ + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP, ?City) . \n" + "% --- Value invention: universities have some address --- \n" // - + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) ." + + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) . \n" + "% --- Negation: organisations in Europe but not in Germany --- \n" // - + "inEuropeOutsideGermany(?Org) :- address(?Org, ?S, ?Z, ?City), locatedIn(?City, europe), ~locatedIn(?City, germany) ." - + ""; + + "inEuropeOutsideGermany(?Org) :- address(?Org, ?S, ?Z, ?City), locatedIn(?City, europe), ~locatedIn(?City, germany) . \n" + + "% ---\n"; + + System.out.println("Knowledge base used in this example:\n\n" + rules); RuleParser ruleParser = new RuleParser(); try { @@ -74,22 +76,20 @@ public static void main(final String[] args) throws IOException { } try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFacts(ruleParser.getFacts()); reasoner.addRules(ruleParser.getRules()); - System.out.println("Rules configured:\n--"); - reasoner.getRules().forEach(System.out::println); - System.out.println("--"); - + System.out.print("Loading rules and facts ... "); reasoner.load(); + System.out.println("done."); - System.out.println("Loading completed."); - System.out.println("Starting reasoning ..."); + System.out.print("Computing all inferences ... "); reasoner.reason(); - System.out.println("... reasoning completed.\n--"); + System.out.println("done.\n"); /* Execute some queries */ + ExamplesUtils.printOutQueryAnswers("address(?Org, ?Street, ?ZIP, ?City)", reasoner); + ExamplesUtils.printOutQueryAnswers("locatedIn(?place, europe)", reasoner); ExamplesUtils.printOutQueryAnswers("inEuropeOutsideGermany(?Org)", reasoner); System.out.println("Done."); From 92fb842ad15bc18740551635658f6188b8a5f1f0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 14:17:25 +0200 Subject: [PATCH 0125/1003] Dedicated objects for data source declarations * get rid of Pair * improve integration in vlog4j.model * reorganise some code --- .../IncompatiblePredicateArityException.java | 2 +- .../{reasoner => model/api}/DataSource.java | 7 +- .../core/model/api/DataSourceDeclaration.java | 45 ++++++++++ .../vlog4j/core/model/api/Literal.java | 10 +-- .../DataSourceDeclarationImpl.java | 86 +++++++++++++++++++ .../vlog4j/core/reasoner/Reasoner.java | 1 + .../implementation/FileDataSource.java | 10 +-- .../SparqlQueryResultDataSource.java | 5 +- .../implementation/VLogDataSource.java | 36 ++++++++ .../reasoner/implementation/VLogReasoner.java | 2 +- .../core/model/DataSourceDeclarationTest.java | 66 ++++++++++++++ .../implementation/AddDataSourceTest.java | 2 +- .../vlog4j/examples/CountingTriangles.java | 9 +- .../vlog4j/examples/DoidExample.java | 9 +- .../examples/core/AddDataFromCsvFile.java | 9 +- .../examples/core/AddDataFromRdfFile.java | 8 +- .../core/AddDataFromSparqlQueryResults.java | 2 +- .../examples/graal/DoidExampleGraal.java | 2 +- .../semanticweb/vlog4j/parser/RuleParser.java | 12 ++- .../vlog4j/parser/javacc/JavaCCParser.jj | 2 +- .../parser/javacc/JavaCCParserBase.java | 13 +-- .../vlog4j/syntax/parser/RuleParserTest.java | 20 ++--- 22 files changed, 289 insertions(+), 69 deletions(-) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/{reasoner => model/api}/DataSource.java (80%) create mode 100644 
vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java index c072e49fa..46d9075eb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java @@ -22,8 +22,8 @@ import java.text.MessageFormat; +import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/DataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java similarity index 80% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/DataSource.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java index aa139eb43..abaaa9d03 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/DataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.vlog4j.core.model.api; /*- * #%L @@ -27,10 +27,7 @@ * */ public interface DataSource { - - public static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; - public static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; - + /** * Constructs a String representation of the data source. * diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java new file mode 100644 index 000000000..f92f19a0f --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java @@ -0,0 +1,45 @@ +package org.semanticweb.vlog4j.core.model.api; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * A declaration for an external data source, which assigns a predicate to a + * source. + * + * @author Markus Kroetzsch + * + */ +public interface DataSourceDeclaration { + + /** + * Returns the {@link Predicate} that this source applies to. 
+ * + * @return predicate into which data from the given source is loaded + */ + Predicate getPredicate(); + + /** + * Returns the {@link DataSource} that the data is loaded from. + * + * @return data source specification + */ + DataSource getDataSource(); +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java index 711ee5196..05928068b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java @@ -23,19 +23,15 @@ import java.util.List; import java.util.Set; -/** - * - * @author Irina Dragoste - * - */ /** * Interface for literals. A literal is predicate applied to a tuple of terms, - * with a positive or negative polarity. An atomic formula is a formula - * of the form P(t1,...,tn) for P a {@link Predicate} name, and t1,...,tn some + * with a positive or negative polarity. An atomic formula is a formula of the + * form P(t1,...,tn) for P a {@link Predicate} name, and t1,...,tn some * {@link Term}s. A Literal is a positive or negated atomic formula. The number * of terms in the tuple corresponds to the {@link Predicate} arity. * * @author david.carral@tu-dresden.de + * @author Irina Dragoste */ public interface Literal { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java new file mode 100644 index 000000000..37a5e198e --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java @@ -0,0 +1,86 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Predicate; + +/** + * Basic implementation for {@link DataSourceDeclaration}. 
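+ * This implementation pairs the declared {@link Predicate} with its
+ * {@link DataSource}; two declarations are equal if and only if they agree on
+ * both the predicate and the data source.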
+ * + * @author Markus Kroetzsch + * + */ +public class DataSourceDeclarationImpl implements DataSourceDeclaration { + + final Predicate predicate; + final DataSource dataSource; + + public DataSourceDeclarationImpl(Predicate predicate, DataSource dataSource) { + Validate.notNull(predicate, "Predicate cannot be null."); + Validate.notNull(dataSource, "Data source cannot be null."); + this.predicate = predicate; + this.dataSource = dataSource; + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public DataSource getDataSource() { + return this.dataSource; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = this.predicate.hashCode(); + result = prime * result + this.dataSource.hashCode(); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof DataSourceDeclaration)) { + return false; + } + final DataSourceDeclaration other = (DataSourceDeclaration) obj; + + return (this.predicate.equals(other.getPredicate())) && this.dataSource.equals(other.getDataSource()); + } + + @Override + public String toString() { + return "@source " + this.predicate.toString() + "(" + this.predicate.getArity() + ") : " + + this.dataSource.toConfigString() + " ."; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 345ad73f0..d7edc595c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -9,6 +9,7 @@ import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.QueryResult; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSource.java index 38d44528b..8c5fb7e21 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSource.java @@ -29,7 +29,6 @@ import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.reasoner.DataSource; /** * A {@code FileDataSource} is an abstract implementation of a storage for fact @@ -40,7 +39,7 @@ * @author Irina Dragoste * */ -public abstract class FileDataSource implements DataSource { +public abstract class FileDataSource extends VLogDataSource { private final static String DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY"; @@ -79,9 +78,8 @@ private String getValidExtension(final File file, final Iterable possibl final Optional potentialExtension = extensionsStream.filter(ex -> fileName.endsWith(ex)).findFirst(); if (!potentialExtension.isPresent()) { - throw new IllegalArgumentException( - "Expected one of the following extensions for the data source file " + file + ": " - + String.join(", ", possibleExtensions) + 
"."); + throw new IllegalArgumentException("Expected one of the following extensions for the data source file " + + file + ": " + String.join(", ", possibleExtensions) + "."); } return potentialExtension.get(); @@ -93,7 +91,7 @@ public final String toConfigString() { PREDICATE_NAME_CONFIG_LINE + - DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + + DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + "EDB%1$d_param0=" + this.dirCanonicalPath + "\n" + diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index 0ed6d7ec3..c4f83be33 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -27,7 +27,6 @@ import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.DataSource; /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a @@ -36,7 +35,7 @@ * @author Irina Dragoste * */ -public class SparqlQueryResultDataSource implements DataSource { +public class SparqlQueryResultDataSource extends VLogDataSource { private static final String DATASOURCE_TYPE_CONFIG_VALUE = "SPARQL"; @@ -113,7 +112,7 @@ public String getQueryVariables() { public final String toConfigString() { final String configStringPattern = - DataSource.PREDICATE_NAME_CONFIG_LINE + + PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java new file mode 100644 index 000000000..04024d3d2 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java @@ -0,0 +1,36 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.DataSource; + +/** + * Abstract base class for VLog-specific data sources. 
+ * + * @author Markus Kroetzsch + * + */ +public abstract class VLogDataSource implements DataSource { + + public static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; + public static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 3263b158c..0c435e7ba 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -16,6 +16,7 @@ import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -24,7 +25,6 @@ import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java new file mode 100644 index 000000000..c718ccd1a --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -0,0 +1,66 @@ +package org.semanticweb.vlog4j.core.model; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.net.MalformedURLException; +import java.net.URL; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; + +public class DataSourceDeclarationTest { + + @Test + public void equalityTest() throws MalformedURLException { + DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + "?var wdt:P31 wd:Q5 ."); + Predicate predicate1 = Expressions.makePredicate("p", 3); + DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); + + DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + "?var wdt:P31 wd:Q5 ."); + Predicate predicate2 = Expressions.makePredicate("p", 3); + DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); + + DataSource dataSource3 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var2", + "?var2 wdt:P31 wd:Q5 ."); + DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, dataSource3); + + Predicate predicate4 = Expressions.makePredicate("q", 1); + DataSourceDeclaration dataSourceDeclaration4 = new DataSourceDeclarationImpl(predicate4, dataSource2); + + assertEquals(dataSourceDeclaration1, dataSourceDeclaration1); + assertEquals(dataSourceDeclaration1, dataSourceDeclaration2); + assertEquals(dataSourceDeclaration1.hashCode(), dataSourceDeclaration2.hashCode()); + assertNotEquals(dataSourceDeclaration1, dataSource1); + assertNotEquals(dataSourceDeclaration1, dataSourceDeclaration3); + assertNotEquals(dataSourceDeclaration1, dataSourceDeclaration4); + assertFalse(dataSourceDeclaration1.equals(null)); // written like this for recording coverage properly + } + +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 41fda702f..3c00b1fd3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -32,11 +32,11 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 454307e5b..8ea4b20e0 100644 --- 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -24,11 +24,9 @@ import java.io.IOException; -import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.exceptions.VLog4jException; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -55,8 +53,9 @@ public static void main(final String[] args) throws IOException { System.out.println("Failed to parse rules: " + e.getMessage()); return; } - for (Pair pair : ruleParser.getDataSources()) { - reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight()); + for (DataSourceDeclaration dataSourceDeclaration : ruleParser.getDataSourceDeclartions()) { + reasoner.addFactsFromDataSource(dataSourceDeclaration.getPredicate(), + dataSourceDeclaration.getDataSource()); } reasoner.addRules(ruleParser.getRules()); System.out.println("Rules used in this example:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 8fbc5f191..a2f39fc74 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -25,11 +25,9 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.exceptions.VLog4jException; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; @@ -63,8 +61,9 @@ public static void main(final String[] args) throws IOException { System.out.println("Failed to parse rules: " + e.getMessage()); return; } - for (Pair pair : ruleParser.getDataSources()) { - reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight()); + for (DataSourceDeclaration dataSourceDeclaration : ruleParser.getDataSourceDeclartions()) { + reasoner.addFactsFromDataSource(dataSourceDeclaration.getPredicate(), + dataSourceDeclaration.getDataSource()); } reasoner.addRules(ruleParser.getRules()); System.out.println("Rules used in this example:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 3a5c9113c..1bcd7ac00 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -22,13 +22,11 @@ import java.io.IOException; -import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; 
import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; @@ -88,8 +86,9 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I */ try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.addRules(ruleParser.getRules()); - for (Pair pair : ruleParser.getDataSources()) { - reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight()); + for (DataSourceDeclaration dataSourceDeclaration : ruleParser.getDataSourceDeclartions()) { + reasoner.addFactsFromDataSource(dataSourceDeclaration.getPredicate(), + dataSourceDeclaration.getDataSource()); } reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 9d8d690d5..6868522a5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -22,13 +22,12 @@ import java.io.IOException; -import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; @@ -91,8 +90,9 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I */ try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.addRules(ruleParser.getRules()); - for (Pair pair : ruleParser.getDataSources()) { - reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight()); + for (DataSourceDeclaration dataSourceDeclaration : ruleParser.getDataSourceDeclartions()) { + reasoner.addFactsFromDataSource(dataSourceDeclaration.getPredicate(), + dataSourceDeclaration.getDataSource()); } reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 07f7b2c15..c7edd863b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -30,13 +30,13 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.DataSource; import 
org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index da23067c9..dcb2adaf7 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -30,12 +30,12 @@ import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 58068d124..cf1018272 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -24,13 +24,11 @@ import java.io.InputStream; import java.util.List; -import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.ParseException; import org.semanticweb.vlog4j.parser.javacc.TokenMgrError; @@ -98,9 +96,9 @@ public List getRules() { public List getFacts() { return parser.getFacts(); } - - public List> getDataSources() { - return parser.getDataSources(); + + public List getDataSourceDeclartions() { + return parser.getDataSourceDeclartions(); } - + } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index a81f9a822..cd9b6bab8 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -31,7 +31,7 @@ import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index c73cc2cd7..8c80e25bd 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -24,13 +24,14 @@ import java.util.ArrayList; import java.util.HashSet; -import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -53,7 +54,7 @@ public class JavaCCParserBase { final List rules = new ArrayList<>(); final List facts = new ArrayList<>(); - final List> dataSources = new ArrayList<>(); + final List dataSourceDaclarations = new ArrayList<>(); /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -101,7 +102,7 @@ Constant createDoubleLiteral(String lexicalForm) { void addDataSource(String predicateName, int arity, DataSource dataSource) { Predicate predicate = Expressions.makePredicate(predicateName, arity); - dataSources.add(Pair.of(predicate, dataSource)); + dataSourceDaclarations.add(new DataSourceDeclarationImpl(predicate, dataSource)); } static String unescapeStr(String s, int line, int column) throws ParseException { @@ -225,8 +226,8 @@ public List getFacts() { return facts; } - public List> getDataSources() { - return dataSources; + public List getDataSourceDeclartions() { + return dataSourceDaclarations; } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index af93ccbc6..c78808e7f 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -391,9 +391,9 @@ public void testCsvSource() throws ParsingException, IOException { ruleParser.parse(input); CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); Predicate p = Expressions.makePredicate("p", 2); - assertEquals(1, ruleParser.getDataSources().size()); - assertEquals(p, ruleParser.getDataSources().get(0).getLeft()); - assertEquals(csvds, ruleParser.getDataSources().get(0).getRight()); + assertEquals(1, ruleParser.getDataSourceDeclartions().size()); + assertEquals(p, ruleParser.getDataSourceDeclartions().get(0).getPredicate()); + assertEquals(csvds, ruleParser.getDataSourceDeclartions().get(0).getDataSource()); } @Test @@ -403,9 +403,9 @@ public void testRdfSource() throws ParsingException, IOException { ruleParser.parse(input); RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); Predicate p = Expressions.makePredicate("p", 3); - assertEquals(1, ruleParser.getDataSources().size()); - assertEquals(p, ruleParser.getDataSources().get(0).getLeft()); - assertEquals(rdfds, ruleParser.getDataSources().get(0).getRight()); + assertEquals(1, ruleParser.getDataSourceDeclartions().size()); + assertEquals(p, ruleParser.getDataSourceDeclartions().get(0).getPredicate()); + assertEquals(rdfds, ruleParser.getDataSourceDeclartions().get(0).getDataSource()); } @Test(expected = ParsingException.class) @@ -423,11 +423,11 @@ public void testSparqlSource() throws ParsingException, MalformedURLException { SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); Predicate p = Expressions.makePredicate("p", 2); - assertEquals(1, ruleParser.getDataSources().size()); - assertEquals(p, ruleParser.getDataSources().get(0).getLeft()); - assertEquals(sparqlds, ruleParser.getDataSources().get(0).getRight()); + assertEquals(1, ruleParser.getDataSourceDeclartions().size()); + assertEquals(p, ruleParser.getDataSourceDeclartions().get(0).getPredicate()); + assertEquals(sparqlds, ruleParser.getDataSourceDeclartions().get(0).getDataSource()); } - + @Test(expected = ParsingException.class) public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; From 134d680ee248f330a64d8c86676108d1f249d708 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 14:39:33 +0200 Subject: [PATCH 
0126/1003] Fixed compile errors --- vlog4j-core/LICENSE.txt | 402 +++++++++--------- .../implementation/AddDataSourceTest.java | 4 - .../LoadDataFromCsvFileTest.java | 15 +- .../LoadDataFromMemoryTest.java | 3 - .../vlog4j/examples/CountingTriangles.java | 4 +- .../vlog4j/examples/DoidExample.java | 9 +- .../examples/SimpleReasoningExample.java | 10 +- .../examples/core/AddDataFromRdfFile.java | 6 +- .../SkolemVsRestrictedChaseTermination.java | 3 +- .../examples/graal/AddDataFromGraal.java | 4 +- .../examples/graal/DoidExampleGraal.java | 22 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 5 +- .../examples/rdf/AddDataFromRdfModel.java | 10 +- 13 files changed, 241 insertions(+), 256 deletions(-) diff --git a/vlog4j-core/LICENSE.txt b/vlog4j-core/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/vlog4j-core/LICENSE.txt +++ b/vlog4j-core/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
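The hunks that follow migrate the tests and examples from the old reasoner-centric calls (Reasoner.getInstance(), reasoner.addRules(...), reasoner.addFacts(...)) to the knowledge-base-centric API, in which a VLogKnowledgeBase is populated first and then handed to a VLogReasoner. The sketch below summarises the target pattern using only calls that appear in these hunks; the class name KnowledgeBaseApiSketch is illustrative, and the materialisation and query steps are omitted because they are not part of these diffs.

```java
import org.semanticweb.vlog4j.core.model.api.Constant;
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.model.api.Variable;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

/* Illustrative class name, not part of the patch series. */
public class KnowledgeBaseApiSketch {

	public static void main(final String[] args) throws Exception {
		/* Populate the knowledge base first; the reasoner no longer collects rules and facts itself. */
		final VLogKnowledgeBase kb = new VLogKnowledgeBase();

		/* Fact p(c) and rule q(?x) :- p(?x), created with the Expressions factory. */
		final Constant c = Expressions.makeConstant("c");
		final Variable x = Expressions.makeVariable("x");
		final PositiveLiteral factPc = Expressions.makePositiveLiteral("p", c);
		final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", x),
				Expressions.makePositiveLiteral("p", x));
		kb.addFacts(factPc);
		kb.addRules(rule);

		/* The reasoner is constructed over the knowledge base and closed by try-with-resources. */
		try (final Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.load();
			/* Materialisation and querying would follow here (omitted). */
		}
	}
}
```

This keeps rules, facts and data sources in the knowledge base itself, independent of any particular reasoner instance, which is the pattern the following hunks apply throughout the examples and tests.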
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 2aeb1d45a..101f72d71 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -39,10 +39,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.DataSource; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java index 73aeff2a4..2322be140 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java @@ -40,10 +40,6 @@ import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - import karmaresearch.vlog.EDBConfigurationException; @@ -69,15 +65,16 @@ public void testLoadEmptyCsvFile() @Test public void testLoadUnaryFactsFromCsvFile() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + EDBConfigurationException, IOException, IncompatiblePredicateArityException { testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv"))); testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + ".csv.gz"))); } - private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) throws ReasonerStateException, - EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { + private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) + throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, + IncompatiblePredicateArityException { final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addFactsFromDataSource(unaryPredicate1, fileDataSource); kb.addFactsFromDataSource(unaryPredicate2, fileDataSource); @@ -123,8 +120,8 @@ public void testLoadNonexistingCsvFile() @Test(expected = IncompatiblePredicateArityException.class) public void testLoadCsvFileWrongArity() throws IOException, ReasonerStateException, EdbIdbSeparationException, 
IncompatiblePredicateArityException { - final FileDataSource fileDataSource = new CsvFileDataSource( - new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv")); + final FileDataSource fileDataSource = new CsvFileDataSource(new File( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv")); final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addFactsFromDataSource(unaryPredicate1, fileDataSource); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java index 4c853cb02..b5fa9d13b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java @@ -32,9 +32,6 @@ import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import karmaresearch.vlog.EDBConfigurationException; public class LoadDataFromMemoryTest { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 932b88c66..757352f59 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -29,7 +29,8 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.reasoner.DataSource; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -64,7 +65,6 @@ public static void main(final String[] args) throws IOException { System.out.println(""); try (VLogReasoner reasoner = new VLogReasoner(kb)) { - /* Initialise reasoner and compute inferences */ System.out.print("Initialising rules and data sources ... 
"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 300468ad4..46bdc6512 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -30,12 +30,11 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.reasoner.DataSource; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -54,10 +53,6 @@ public class DoidExample { public static void main(final String[] args) throws IOException { ExamplesUtils.configureLogging(); - // <<<<<<< HEAD - // final URL wikidataSparqlEndpoint = new - // URL("https://query.wikidata.org/sparql"); - final VLogKnowledgeBase kb = new VLogKnowledgeBase(); try (Reasoner reasoner = new VLogReasoner(kb)) { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index f2cca5444..5fe449762 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -24,6 +24,8 @@ import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -74,10 +76,12 @@ public static void main(final String[] args) throws IOException { System.out.println("Failed to parse rules: " + e.getMessage()); return; } + + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); + kb.addRules(ruleParser.getRules()); + kb.addFacts(ruleParser.getFacts()); - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFacts(ruleParser.getFacts()); - reasoner.addRules(ruleParser.getRules()); + try (final Reasoner reasoner = new VLogReasoner(kb)) { System.out.print("Loading rules and facts ... 
"); reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 2daa1ec30..3fda768d0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -94,12 +94,12 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I */ final KnowledgeBase kb = reasoner.getKnowledgeBase(); kb.addRules(ruleParser.getRules()); - + /* Importing {@code .nt.gz} file as data source. */ for (Pair pair : ruleParser.getDataSources()) { - reasoner.addFactsFromDataSource(pair.getLeft(), pair.getRight()); + kb.addFactsFromDataSource(pair.getLeft(), pair.getRight()); } - + reasoner.load(); System.out.println("Before materialisation:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 50d82e573..60a70716d 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -27,8 +27,9 @@ import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index bc6a02716..f9bb66b90 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -28,10 +28,8 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index da23067c9..86f54b3d6 100644 --- 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -40,6 +40,8 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.DoidExample; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; @@ -65,14 +67,16 @@ public static void main(final String[] args) ExamplesUtils.configureLogging(); final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); + + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = new VLogReasoner(kb)) { /* Configure RDF data source */ final Predicate doidTriplePredicate = makePredicate("doidTriple", 3); final DataSource doidDataSource = new RdfFileDataSource( new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); - reasoner.addFactsFromDataSource(doidTriplePredicate, doidDataSource); + kb.addFactsFromDataSource(doidTriplePredicate, doidDataSource); /* Configure SPARQL data sources */ final String sparqlHumansWithDisease = "?disease wdt:P699 ?doid ."; @@ -80,21 +84,21 @@ public static void main(final String[] args) final DataSource diseasesDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "disease,doid", sparqlHumansWithDisease); final Predicate diseaseIdPredicate = Expressions.makePredicate("diseaseId", 2); - reasoner.addFactsFromDataSource(diseaseIdPredicate, diseasesDataSource); + kb.addFactsFromDataSource(diseaseIdPredicate, diseasesDataSource); final String sparqlRecentDeaths = "?human wdt:P31 wd:Q5; wdt:P570 ?deathDate . FILTER (YEAR(?deathDate) = 2018)"; // (wdt:P31 = "instance of"; wd:Q5 = "human", wdt:570 = "date of death") final DataSource recentDeathsDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "human", sparqlRecentDeaths); final Predicate recentDeathsPredicate = Expressions.makePredicate("recentDeaths", 1); - reasoner.addFactsFromDataSource(recentDeathsPredicate, recentDeathsDataSource); + kb.addFactsFromDataSource(recentDeathsPredicate, recentDeathsDataSource); final String sparqlRecentDeathsCause = sparqlRecentDeaths + "?human wdt:P509 ?causeOfDeath . 
"; // (wdt:P509 = "cause of death") final DataSource recentDeathsCauseDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "human,causeOfDeath", sparqlRecentDeathsCause); final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); - reasoner.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); + kb.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); /* Load rules from DLGP file */ try (final DlgpParser parser = new DlgpParser( @@ -102,7 +106,7 @@ public static void main(final String[] args) while (parser.hasNext()) { final Object object = parser.next(); if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { - reasoner.addRules( + kb.addRules( GraalToVLog4JModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); } } @@ -118,15 +122,15 @@ public static void main(final String[] args) final PositiveLiteral deathCause = Expressions.makePositiveLiteral("deathCause", x, y); final PositiveLiteral humansWhoDiedOfNoncancer = Expressions.makePositiveLiteral("humansWhoDiedOfNoncancer", x); - reasoner.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), + kb.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), Expressions.makeConjunction(deathCause, diseaseId, notCancerDisease))); // humansWhoDiedOfNoncancer(X) :- deathCause(X,Y), ~hasDoid(Y) final NegativeLiteral hasNotDoid = Expressions.makeNegativeLiteral("hasDoid", y); - reasoner.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), + kb.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), Expressions.makeConjunction(deathCause, hasNotDoid))); System.out.println("Rules configured:\n--"); - reasoner.getRules().forEach(System.out::println); + kb.getRules().forEach(System.out::println); System.out.println("--"); reasoner.load(); System.out.println("Loading completed."); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 8df5658dc..c2fdc33aa 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -37,10 +37,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index 40db673d6..3d4525466 100644 --- 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -44,11 +44,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; @@ -81,7 +77,7 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti /* An RDF Model is obtained from parsing the RDF/XML resource. */ final Model rdfModelISWC2016 = parseRdfResource(inputStreamISWC2016, rdfXMLResourceFile.toURI(), RDFFormat.RDFXML); - + /* * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having * the ternary predicate "TRIPLE". @@ -134,7 +130,7 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti System.out.println("Failed to parse rules: " + e.getMessage()); return; } - + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); /* * The rule that maps people to their organization name based on facts extracted From 2e4a89e21dd53a6300dc651b3c0225c96543d560 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 15:20:47 +0200 Subject: [PATCH 0127/1003] New model element for facts --- .../vlog4j/core/model/api/Fact.java | 32 +++++++++++ .../model/implementation/Expressions.java | 29 ++++++++++ .../core/model/implementation/FactImpl.java | 46 +++++++++++++++ .../vlog4j/core/model/FactTest.java | 57 +++++++++++++++++++ 4 files changed, 164 insertions(+) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java new file mode 100644 index 000000000..bb24292e3 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java @@ -0,0 +1,32 @@ +package org.semanticweb.vlog4j.core.model.api; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +/** + * A fact is a positive (non-negated) literal that contains only constants as + * its terms, but no variables. + * + * @author Markus Kroetzsch + * + */ +public interface Fact extends PositiveLiteral { + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java index ed26eb363..7a89a56a6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java @@ -28,6 +28,7 @@ import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -97,6 +98,34 @@ public static Predicate makePredicate(String name, int arity) { return new PredicateImpl(name, arity); } + /** + * Creates a {@code Fact}. + * + * @param predicateName non-blank {@link Predicate} name + * @param terms non-empty, non-null list of non-null terms that are + * constants + * @return a {@link Fact} with given {@code terms} and {@link Predicate} + * constructed from name given {@code predicateName} and {@code arity} + * given {@code terms} size. + */ + public static Fact makeFact(final String predicateName, final List terms) { + final Predicate predicate = makePredicate(predicateName, terms.size()); + + return new FactImpl(predicate, terms); + } + + /** + * Creates a {@code Fact}. + * + * @param predicate a non-null {@link Predicate} + * @param terms non-empty, non-null list of non-null terms. List size must + * be the same as the given {@code predicate} arity. + * @return a {@link Fact} corresponding to the input. + */ + public static Fact makeFact(final Predicate predicate, final List terms) { + return new FactImpl(predicate, terms); + } + /** * Creates a {@code PositiveLiteral}. * diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java new file mode 100644 index 000000000..d3533d0b3 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -0,0 +1,46 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+
+import java.util.List;
+
+import org.semanticweb.vlog4j.core.model.api.Fact;
+import org.semanticweb.vlog4j.core.model.api.Predicate;
+import org.semanticweb.vlog4j.core.model.api.Term;
+import org.semanticweb.vlog4j.core.model.api.TermType;
+
+/**
+ * Standard implementation of the {@link Fact} interface.
+ *
+ * @author Markus Kroetzsch
+ *
+ */
+public class FactImpl extends PositiveLiteralImpl implements Fact {
+
+	public FactImpl(Predicate predicate, List<Term> terms) {
+		super(predicate, terms);
+		for (Term t : terms) {
+			if (t.getType() != TermType.CONSTANT)
+				throw new IllegalArgumentException("Facts cannot contain variables.");
+		}
+	}
+
+}
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java
new file mode 100644
index 000000000..9e457f6b7
--- /dev/null
+++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java
@@ -0,0 +1,57 @@
+package org.semanticweb.vlog4j.core.model;
+
+/*-
+ * #%L
+ * VLog4j Core Components
+ * %%
+ * Copyright (C) 2018 - 2019 VLog4j Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.*;
+
+import java.util.Arrays;
+
+import org.junit.Test;
+import org.semanticweb.vlog4j.core.model.api.Constant;
+import org.semanticweb.vlog4j.core.model.api.Fact;
+import org.semanticweb.vlog4j.core.model.api.Predicate;
+import org.semanticweb.vlog4j.core.model.api.Variable;
+import org.semanticweb.vlog4j.core.model.implementation.Expressions;
+import org.semanticweb.vlog4j.core.model.implementation.FactImpl;
+
+public class FactTest {
+
+	@Test
+	public void factsConstructor() {
+		Predicate p = Expressions.makePredicate("p", 2);
+		Constant c = Expressions.makeConstant("c");
+		Constant d = Expressions.makeConstant("d");
+		Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d));
+		Fact f2 = Expressions.makeFact("p", Arrays.asList(c, d));
+		Fact f3 = new FactImpl(p, Arrays.asList(c, d));
+		assertEquals(f1, f2);
+		assertEquals(f1, f3);
+		assertEquals(f2, f3);
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void factsOnlyContainConstants() {
+		Predicate p = Expressions.makePredicate("p", 1);
+		Variable x = Expressions.makeVariable("X");
+		new FactImpl(p, Arrays.asList(x));
+	}
+
+}
From bd1bcaa2412c630dd4f1486a7d9c2cda27555aac Mon Sep 17 00:00:00 2001
From: mkroetzsch
Date: Tue, 20 Aug 2019 16:32:30 +0200
Subject: [PATCH 0128/1003] Use Fact in all modules

---
 .../core/model/implementation/FactImpl.java | 2 +-
 .../vlog4j/core/reasoner/KnowledgeBase.java | 11 +-
 .../implementation/VLogKnowledgeBase.java | 19 +-
 .../vlog4j/core/reasoner/LoggingTest.java | 5 +-
 .../core/reasoner/ReasonerTimeoutTest.java | 32 ++--
 .../implementation/AddDataSourceTest.java | 13 +-
 .../implementation/AnswerQueryTest.java | 39 ++--
 .../ExportQueryAnswersToCsvFileTest.java | 7 +-
 .../GeneratedAnonymousIndividualsTest.java | 17 +-
 .../LoadDataFromMemoryTest.java | 20 +-
.../implementation/ReasonerStateTest.java | 8 +- .../reasoner/implementation/ReasonerTest.java | 5 +- .../StratifiedNegationTest.java | 19 +- .../core/ConfigureReasonerLogging.java | 5 +- .../examples/graal/AddDataFromDlgpFile.java | 2 +- .../examples/graal/AddDataFromGraal.java | 2 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 3 +- .../examples/rdf/AddDataFromRdfModel.java | 5 +- .../graal/GraalToVLog4JModelConverter.java | 33 ++++ .../owlapi/OwlAxiomToRulesConverter.java | 8 +- .../owlapi/OwlToRulesConversionHelper.java | 15 ++ .../vlog4j/owlapi/OwlToRulesConverter.java | 4 +- .../semanticweb/vlog4j/parser/RuleParser.java | 3 +- .../vlog4j/parser/javacc/JavaCCParser.jj | 29 ++- .../parser/javacc/JavaCCParserBase.java | 6 +- .../vlog4j/rdf/RdfModelConverter.java | 24 ++- .../semanticweb/vlog4j/rdf/RdfTestUtils.java | 6 +- .../vlog4j/rdf/TestConvertRdfFileToFacts.java | 180 ++++++++++-------- .../vlog4j/rdf/TestReasonOverRdfFacts.java | 9 +- 29 files changed, 312 insertions(+), 219 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index d3533d0b3..1d1e9ce7b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -38,7 +38,7 @@ public class FactImpl extends PositiveLiteralImpl implements Fact { public FactImpl(Predicate predicate, List terms) { super(predicate, terms); for (Term t : terms) { - if (t.getType() != TermType.CONSTANT) + if (t.getType() == TermType.VARIABLE) throw new IllegalArgumentException("Facts cannot contain variables."); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index d81ff85ca..6ed7b2350 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -6,6 +6,7 @@ import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -70,7 +71,7 @@ public abstract class KnowledgeBase extends Observable { * @return list of {@link Rule} */ public abstract List getRules(); - + /** * Adds non-null facts to the knowledge base. A fact is a * {@link PositiveLiteral} with all terms ({@link PositiveLiteral#getTerms()}) @@ -90,7 +91,7 @@ public abstract class KnowledgeBase extends Observable { * {@link TermType#CONSTANT}. */ // TODO add examples to javadoc about multiple sources per predicate and EDB/IDB - public abstract void addFacts(@NonNull PositiveLiteral... facts); + public abstract void addFacts(Fact... facts); /** * Adds non-null facts to the knowledge base. A fact is a @@ -110,12 +111,12 @@ public abstract class KnowledgeBase extends Observable { * {@link TermType#CONSTANT}. */ // TODO add examples to javadoc about multiple sources per predicate and EDB/IDB - public abstract void addFacts(@NonNull Collection facts); + public abstract void addFacts(Collection facts); /** * Adds facts stored in given {@code dataSource} for given {@code predicate} to - * the knowledge base. 
Facts predicates cannot have multiple - * data sources, including in-memory {@link Atom} objects added trough + * the knowledge base. Facts predicates cannot have multiple data + * sources, including in-memory {@link Atom} objects added trough * {@link #addFacts}. * * @param predicate the {@link Predicate} for which the given diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java index f02752882..aa9f3e310 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -34,6 +34,7 @@ import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -47,8 +48,6 @@ public class VLogKnowledgeBase extends KnowledgeBase { private final Map> factsForPredicate = new HashMap<>(); private final Map dataSourceForPredicate = new HashMap<>(); - - @Override public void addRules(final Rule... rules) { addRules(Arrays.asList(rules)); @@ -83,9 +82,8 @@ public List getRules() { return Collections.unmodifiableList(this.rules); } - @Override - public void addFacts(final PositiveLiteral... facts) { + public void addFacts(final Fact... facts) { addFacts(Arrays.asList(facts)); // TODO setChanged @@ -93,7 +91,7 @@ public void addFacts(final PositiveLiteral... facts) { } @Override - public void addFacts(final Collection facts) { + public void addFacts(final Collection facts) { Validate.noNullElements(facts, "Null facts are not alowed! 
The list contains a fact at position [%d]."); for (final PositiveLiteral fact : facts) { validateFactTermsAreConstant(fact); @@ -117,11 +115,11 @@ public void addFactsFromDataSource(final Predicate predicate, final DataSource d this.dataSourceForPredicate.put(predicate, dataSource); } - + boolean hasFacts() { return !this.dataSourceForPredicate.isEmpty() || !this.factsForPredicate.isEmpty(); } - + Map getDataSourceForPredicate() { return this.dataSourceForPredicate; } @@ -129,7 +127,7 @@ Map getDataSourceForPredicate() { Map> getFactsForPredicate() { return this.factsForPredicate; } - + Set getEdbPredicates() { // TODO use cache return collectEdbPredicates(); @@ -139,7 +137,7 @@ Set getIdbPredicates() { // TODO use cache return collectIdbPredicates(); } - + String generateDataSourcesConfig() { final StringBuilder configStringBuilder = new StringBuilder(); int dataSourceIndex = 0; @@ -153,7 +151,7 @@ String generateDataSourcesConfig() { } return configStringBuilder.toString(); } - + void validateEdbIdbSeparation() throws EdbIdbSeparationException { final Set edbPredicates = getEdbPredicates(); final Set idbPredicates = getIdbPredicates(); @@ -164,7 +162,6 @@ void validateEdbIdbSeparation() throws EdbIdbSeparationException { } } - private void validateFactTermsAreConstant(PositiveLiteral fact) { final Set nonConstantTerms = new HashSet<>(fact.getTerms()); nonConstantTerms.removeAll(fact.getConstants()); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index cbeaecaa9..458f29f87 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -29,12 +29,14 @@ */ import java.io.IOException; +import java.util.Arrays; import org.junit.Test; import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; @@ -42,7 +44,6 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; - public class LoggingTest { public static final String LOGS_FOLDER = "src/test/data/logs/"; @@ -54,7 +55,7 @@ public class LoggingTest { private static final Rule rule = Expressions.makeRule(ruleHeadQx, ruleBodyPx); private static final Constant constantC = Expressions.makeConstant("c"); - private static final PositiveLiteral factPc = Expressions.makePositiveLiteral("p", constantC); + private static final Fact factPc = Expressions.makeFact("p", Arrays.asList(constantC)); private static final VLogKnowledgeBase kb = new VLogKnowledgeBase(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index d4b70170b..77a02d88d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -22,6 +22,7 @@ * 
#L% */ +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeFact; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; @@ -29,6 +30,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import org.junit.After; @@ -39,6 +41,7 @@ import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -46,7 +49,6 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.VLogKnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; - /** * Test case ensuring {@link Reasoner#setReasoningTimeout(Integer)} works as * expected and terminates reasoning after the given {@link #timeout}. Results @@ -66,7 +68,7 @@ public class ReasonerTimeoutTest { /** * A list of facts to be used in multiple test runs. */ - private static List facts = new ArrayList<>(); + private static List facts = new ArrayList<>(); /** * A list of rules to be used in multiple test runs. */ @@ -84,36 +86,34 @@ public class ReasonerTimeoutTest { public Timeout globalTimeout = Timeout.seconds(timeout + 1); /** - * This method provides the {@link #facts} and {@link #rules} to be used in all test runs. - * To test if the timeout works as expected, a small set of facts and rules is used that results in an infinite chase. - * Facts: - * infinite_EDB(A, B) - * Rules: - * infinite_IDB(?x, ?y) :- infinite_EDB(?x, ?y) - * infinite_IDB(?y, ?z) :- infinite_IDB(?x, ?y) + * This method provides the {@link #facts} and {@link #rules} to be used in all + * test runs. To test if the timeout works as expected, a small set of facts and + * rules is used that results in an infinite chase. 
Facts: infinite_EDB(A, B) + * Rules: infinite_IDB(?x, ?y) :- infinite_EDB(?x, ?y) infinite_IDB(?y, ?z) :- + * infinite_IDB(?x, ?y) */ @BeforeClass public static void setUpBeforeClass() { final Predicate infinite_EDB = makePredicate("infinite_EDB", 2); final Predicate infinite_IDB = makePredicate("infinite_IDB", 2); - - facts.add(makePositiveLiteral(infinite_EDB, makeConstant("A"), makeConstant("B"))); - + + facts.add(makeFact(infinite_EDB, Arrays.asList(makeConstant("A"), makeConstant("B")))); + final Variable x = makeVariable("x"); final Variable y = makeVariable("y"); final PositiveLiteral infinite_IDB_xy = makePositiveLiteral(infinite_IDB, x, y); final PositiveLiteral infinite_EDB_xy = makePositiveLiteral(infinite_EDB, x, y); - + final Rule import_rule = makeRule(infinite_IDB_xy, infinite_EDB_xy); rules.add(import_rule); - + final Variable z = makeVariable("z"); - + final PositiveLiteral infinite_IDB_yz = makePositiveLiteral(infinite_IDB, y, z); final Rule infinite_rule = makeRule(infinite_IDB_yz, infinite_IDB_xy); rules.add(infinite_rule); - + kb.addRules(rules); kb.addFacts(facts); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 78a668266..6dca2128a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -24,6 +24,7 @@ import java.io.File; import java.io.IOException; +import java.util.Arrays; import java.util.List; import java.util.Set; @@ -34,7 +35,7 @@ import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; @@ -51,8 +52,8 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws Reasone EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { final Predicate predicateParity1 = Expressions.makePredicate("p", 1); final Constant constantA = Expressions.makeConstant("a"); - final PositiveLiteral factPredicatePArity2 = Expressions.makePositiveLiteral("p", constantA, constantA); - final PositiveLiteral factPredicateQArity1 = Expressions.makePositiveLiteral("q", constantA); + final Fact factPredicatePArity2 = Expressions.makeFact("p", Arrays.asList(constantA, constantA)); + final Fact factPredicateQArity1 = Expressions.makeFact("q", Arrays.asList(constantA)); final Predicate predicateLArity1 = Expressions.makePredicate("l", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); @@ -145,8 +146,8 @@ public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws Reasoner public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException, IOException { final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - final PositiveLiteral fact = Expressions.makePositiveLiteral(Expressions.makePredicate("p", 1), - Expressions.makeConstant("a")); + final Fact 
fact = Expressions.makeFact(Expressions.makePredicate("p", 1), + Arrays.asList(Expressions.makeConstant("a"))); final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addFacts(fact); @@ -166,7 +167,7 @@ public void testAddDataSourcePredicateNotNull() throws ReasonerStateException, I @Test(expected = NullPointerException.class) public void testAddDataSourceNotNullDataSource() throws ReasonerStateException { final Predicate predicate = Expressions.makePredicate("p", 1); - + final KnowledgeBase kb = new VLogKnowledgeBase(); kb.addFactsFromDataSource(predicate, null); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index 9aac62930..12ab2b868 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -38,6 +38,7 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -63,12 +64,11 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() final Variable x = Expressions.makeVariable("X"); final Variable y = Expressions.makeVariable("Y"); final Variable z = Expressions.makeVariable("Z"); - final PositiveLiteral fact = Expressions.makePositiveLiteral(predicate, constantC, constantC, constantD); + final Fact fact = Expressions.makeFact(predicate, Arrays.asList(constantC, constantC, constantD)); final boolean includeBlanks = false; @SuppressWarnings("unchecked") final Set> factCCD = Sets.newSet(Arrays.asList(constantC, constantC, constantD)); - try (final Reasoner reasoner = Reasoner.getInstance()) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); @@ -112,10 +112,10 @@ public void testIDBQuerySameBlankSubstitutesSameVariableName() final Rule pX__pYY_pYZ = Expressions.makeRule(Expressions.makePositiveConjunction(pYY, pYZ), Expressions.makeConjunction(Expressions.makePositiveLiteral(predicate, x))); assertEquals(Sets.newSet(y, z), pX__pYY_pYZ.getExistentiallyQuantifiedVariables()); - + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(pX__pYY_pYZ); - kb.addFacts(Expressions.makePositiveLiteral(predicate, Expressions.makeConstant("c"))); + kb.addFacts(Expressions.makeFact(predicate, Arrays.asList(Expressions.makeConstant("c")))); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); @@ -155,8 +155,8 @@ public void testIDBQuerySameIndividualSubstitutesSameVariableName() assertEquals(Sets.newSet(z, t), pXY__pXYYZZT.getExistentiallyQuantifiedVariables()); final Constant constantC = Expressions.makeConstant("c"); final Constant constantD = Expressions.makeConstant("d"); - final PositiveLiteral factPcd = Expressions.makePositiveLiteral(predicate, constantC, constantD); - + final Fact factPcd = Expressions.makeFact(predicate, Arrays.asList(constantC, constantD)); + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(pXY__pXYYZZT); kb.addFacts(factPcd); @@ -214,14 +214,16 @@ public void 
testIDBQuerySameIndividualSubstitutesSameVariableName() } @Test - public void queryResultWithBlanks() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void queryResultWithBlanks() + throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { final Variable vx = Expressions.makeVariable("x"); final Variable vy = Expressions.makeVariable("y"); // P(x) -> Q(y) - final Rule existentialRule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vy), Expressions.makePositiveLiteral("p", vx)); + final Rule existentialRule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vy), + Expressions.makePositiveLiteral("p", vx)); assertEquals(Sets.newSet(vy), existentialRule.getExistentiallyQuantifiedVariables()); final Constant constantC = Expressions.makeConstant("c"); - final PositiveLiteral fact = Expressions.makePositiveLiteral("p", constantC); + final Fact fact = Expressions.makeFact("p", Arrays.asList(constantC)); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("q", Expressions.makeVariable("?x")); final VLogKnowledgeBase kb = new VLogKnowledgeBase(); @@ -248,9 +250,10 @@ public void queryResultWithBlanks() throws ReasonerStateException, EdbIdbSeparat } @Test - public void queryEmptyKnowledgeBase() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyKnowledgeBase() + throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { final VLogKnowledgeBase kb = new VLogKnowledgeBase(); - + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -268,11 +271,12 @@ public void queryEmptyKnowledgeBase() throws IOException, EdbIdbSeparationExcept } @Test - public void queryEmptyRules() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyRules() + throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { final VLogKnowledgeBase kb = new VLogKnowledgeBase(); - final PositiveLiteral fact = Expressions.makePositiveLiteral("P", Expressions.makeConstant("c")); + final Fact fact = Expressions.makeFact("P", Arrays.asList(Expressions.makeConstant("c"))); kb.addFacts(fact); - + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -290,10 +294,11 @@ public void queryEmptyRules() throws IOException, EdbIdbSeparationException, Rea } @Test - public void queryEmptyFacts() - throws EDBConfigurationException, IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyFacts() throws EDBConfigurationException, IOException, EdbIdbSeparationException, + ReasonerStateException, IncompatiblePredicateArityException { final Variable vx = Expressions.makeVariable("x"); - final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); + final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), + Expressions.makePositiveLiteral("p", vx)); final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java index a8abc1fd8..f1a204de2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java @@ -32,6 +32,7 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; @@ -47,12 +48,12 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() final Variable x = Expressions.makeVariable("X"); final Variable y = Expressions.makeVariable("Y"); final Variable z = Expressions.makeVariable("Z"); - final PositiveLiteral fact = Expressions.makePositiveLiteral(predicate, constantC, constantC, constantD); + final Fact fact = Expressions.makeFact(predicate, Arrays.asList(constantC, constantC, constantD)); final boolean includeBlanks = false; // final String csvFilePath = CSV_EXPORT_FOLDER + "output"; final List> factCCD = Arrays.asList(Arrays.asList("c", "c", "d")); - + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addFacts(fact); @@ -91,7 +92,7 @@ public void testExportQueryEmptyKnowledgeBase() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); - + final VLogKnowledgeBase kb = new VLogKnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index d98b11dcf..27f73c4a5 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -25,6 +25,7 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; +import java.util.Arrays; import java.util.List; import org.junit.Test; @@ -33,6 +34,7 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; @@ -60,7 +62,7 @@ public class GeneratedAnonymousIndividualsTest { private static VLogKnowledgeBase kb = new VLogKnowledgeBase(); // fact: P(c) private static final Constant constantC = Expressions.makeConstant("c"); - private static final PositiveLiteral fact = Expressions.makePositiveLiteral(p, constantC); + private static final Fact fact = Expressions.makeFact(p, Arrays.asList(constantC)); // query: P(?x,?y) ? 
final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(p, Expressions.makeVariable("?x"), @@ -97,8 +99,9 @@ public void testBlanksSkolemChaseSplitHeadPieces() try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - // the rule {P(?x) -> P(?x,!y), P(?x,!z)} after split becomes: - // { {P(?x) -> P(?x,!y,!z)}, {P(?x,?y,?z) ->, P(?x,?y)}, {P(?x,?y,?z) ->, P(?x,?z)} } + // the rule {P(?x) -> P(?x,!y), P(?x,!z)} after split becomes: + // { {P(?x) -> P(?x,!y,!z)}, {P(?x,?y,?z) ->, P(?x,?y)}, {P(?x,?y,?z) ->, + // P(?x,?z)} } reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); reasoner.load(); @@ -132,10 +135,11 @@ public void testBlanksRestrictedChaseSplitHeadPieces() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { try (final Reasoner reasoner = new VLogReasoner(kb)) { - + reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - // the rule {P(?x) -> P(?x,!y), P(?x,!z)} after split becomes: - // { {P(?x) -> P(?x,!y,!z)}, {P(?x,?y,?z) ->, P(?x,?y)}, {P(?x,?y,?z) ->, P(?x,?z)} } + // the rule {P(?x) -> P(?x,!y), P(?x,!z)} after split becomes: + // { {P(?x) -> P(?x,!y,!z)}, {P(?x,?y,?z) ->, P(?x,?y)}, {P(?x,?y,?z) ->, + // P(?x,?z)} } reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); reasoner.load(); @@ -160,7 +164,6 @@ public void testBlanksRestrictedChaseSplitHeadPieces() } } - private void checkTwoDistinctBlanksGenerated(final Reasoner reasoner) throws ReasonerStateException, IOException, EdbIdbSeparationException { // expected facts: P(c, _:b1), P(c, _:b2) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java index b5fa9d13b..cd1ec134c 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java @@ -21,11 +21,13 @@ */ import java.io.IOException; +import java.util.Arrays; import org.junit.Test; import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; @@ -42,9 +44,9 @@ public void loadEdbIdbNotSeparated() throws EDBConfigurationException, IOExcepti final Variable vx = Expressions.makeVariable("x"); final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); - final PositiveLiteral factIDBpredQ1 = Expressions.makePositiveLiteral("q", Expressions.makeConstant("c")); - final PositiveLiteral factEDBpredQ2 = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), - Expressions.makeConstant("d")); + final Fact factIDBpredQ1 = Expressions.makeFact("q", Arrays.asList(Expressions.makeConstant("c"))); + final Fact factEDBpredQ2 = Expressions.makeFact("q", + Arrays.asList(Expressions.makeConstant("d"), Expressions.makeConstant("d"))); final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); kb.addFacts(factIDBpredQ1, factEDBpredQ2); @@ -60,8 
+62,8 @@ public void loadEdbIdbSeparated() throws EDBConfigurationException, IOException, final Variable vx = Expressions.makeVariable("x"); final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); - final PositiveLiteral factEDBpred = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), - Expressions.makeConstant("d")); + final Fact factEDBpred = Expressions.makeFact("q", + Arrays.asList(Expressions.makeConstant("d"), Expressions.makeConstant("d"))); final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); kb.addFacts(factEDBpred); @@ -74,8 +76,8 @@ public void loadEdbIdbSeparated() throws EDBConfigurationException, IOException, // TODO move to a test class for KnowledgeBase @Test(expected = IllegalArgumentException.class) public void addFactsWithVariableTerms() throws ReasonerStateException { - final PositiveLiteral factWithVariableTerms = Expressions.makePositiveLiteral("q", - Expressions.makeConstant("d"), Expressions.makeVariable("x")); + final Fact factWithVariableTerms = Expressions.makeFact("q", + Arrays.asList(Expressions.makeConstant("d"), Expressions.makeVariable("x"))); final KnowledgeBase kb = new VLogKnowledgeBase(); kb.addFacts(factWithVariableTerms); } @@ -83,8 +85,8 @@ public void addFactsWithVariableTerms() throws ReasonerStateException { // TODO move to a test class for KnowledgeBase @Test(expected = IllegalArgumentException.class) public void addFactsWithBlankTerms() throws ReasonerStateException { - final PositiveLiteral factWithBlankTerms = Expressions.makePositiveLiteral("q", Expressions.makeConstant("d"), - new BlankImpl("b")); + final Fact factWithBlankTerms = Expressions.makeFact("q", Arrays.asList(Expressions.makeConstant("d"), + new BlankImpl("b"))); final KnowledgeBase kb = new VLogKnowledgeBase(); kb.addFacts(factWithBlankTerms); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 0ffc12192..9b3d76288 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -38,6 +38,7 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -61,7 +62,7 @@ public class ReasonerStateTest { private static final PositiveLiteral ruleHeadQx = Expressions.makePositiveLiteral(q, x); private static final PositiveLiteral ruleBodyPx = Expressions.makePositiveLiteral(p, x); private static final Rule ruleQxPx = Expressions.makeRule(ruleHeadQx, ruleBodyPx); - private static final PositiveLiteral factPc = Expressions.makePositiveLiteral(p, c); + private static final Fact factPc = Expressions.makeFact(p, Arrays.asList(c)); // private static final Atom factPd = Expressions.makeAtom(q, d); @Test(expected = NullPointerException.class) @@ -126,7 +127,7 @@ public void testAddFacts1() public void testAddFacts2() throws EdbIdbSeparationException, IOException, ReasonerStateException, 
IncompatiblePredicateArityException { final VLogKnowledgeBase kb = new VLogKnowledgeBase(); - final List facts = new ArrayList<>(); + final List facts = new ArrayList<>(); facts.add(factPc); facts.add(null); kb.addFacts(facts); @@ -307,8 +308,7 @@ public void testFailAnswerQueryBeforeLoad() throws ReasonerStateException { } @Test(expected = ReasonerStateException.class) - public void testFailExportQueryAnswerToCsvBeforeLoad() - throws ReasonerStateException, IOException { + public void testFailExportQueryAnswerToCsvBeforeLoad() throws ReasonerStateException, IOException { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java index 6996429fe..e7dbb4aca 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; @@ -51,8 +52,8 @@ public class ReasonerTest { final Constant constantC = Expressions.makeConstant(constantNameC); final Constant constantD = Expressions.makeConstant(constantNameD); final Variable x = Expressions.makeVariable("x"); - final PositiveLiteral factAc = Expressions.makePositiveLiteral("A", constantC); - final PositiveLiteral factAd = Expressions.makePositiveLiteral("A", constantD); + final Fact factAc = Expressions.makeFact("A", Arrays.asList(constantC)); + final Fact factAd = Expressions.makeFact("A", Arrays.asList(constantD)); final PositiveLiteral atomAx = Expressions.makePositiveLiteral("A", x); final PositiveLiteral atomBx = Expressions.makePositiveLiteral("B", x); final PositiveLiteral atomCx = Expressions.makePositiveLiteral("C", x); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java index c7334d24f..a4b2e33c8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java @@ -26,6 +26,7 @@ import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeNegativeLiteral; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeFact; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; @@ -38,12 +39,12 @@ import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import 
org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; - public class StratifiedNegationTest { @Test(expected = EdbIdbSeparationException.class) @@ -58,7 +59,7 @@ public void testNotStratifiableEdbIdbSeparation() final PositiveLiteral qXY = makePositiveLiteral("Q", x, y); final Rule rule = makeRule(qXY, pXY, notQXY); - final PositiveLiteral fact = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); + final Fact fact = makeFact("Q", Arrays.asList(makeConstant("c"), makeConstant("d"))); final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); @@ -81,7 +82,7 @@ public void testNotStratifiable() final PositiveLiteral qXY = makePositiveLiteral("Q", x, y); final Rule rule = makeRule(qXY, pXY, notQXY); - final PositiveLiteral fact = makePositiveLiteral("P", makeConstant("c"), makeConstant("d")); + final Fact fact = makeFact("P", Arrays.asList(makeConstant("c"), makeConstant("d"))); final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); @@ -106,12 +107,12 @@ public void testStratifiable() final PositiveLiteral sXY = makePositiveLiteral("S", x, y); final Rule rule = makeRule(sXY, pXY, notQXY, notRXY); - final PositiveLiteral pCD = makePositiveLiteral("P", makeConstant("c"), makeConstant("d")); + final Fact pCD = makeFact("P", Arrays.asList(makeConstant("c"), makeConstant("d"))); final Constant e = makeConstant("e"); final Constant f = makeConstant("f"); - final PositiveLiteral pEF = makePositiveLiteral("P", e, f); + final Fact pEF = makeFact("P", Arrays.asList(e, f)); - final PositiveLiteral qCD = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); + final Fact qCD = makeFact("Q", Arrays.asList(makeConstant("c"), makeConstant("d"))); final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); @@ -142,12 +143,12 @@ public void testInputNegation() final PositiveLiteral sXY = makePositiveLiteral("S", x, y); final Rule rule = makeRule(sXY, pXY, notQXY); - final PositiveLiteral pCD = makePositiveLiteral("P", makeConstant("c"), makeConstant("d")); + final Fact pCD = makeFact("P", Arrays.asList(makeConstant("c"), makeConstant("d"))); final Constant e = makeConstant("e"); final Constant f = makeConstant("f"); - final PositiveLiteral pEF = makePositiveLiteral("P", e, f); + final Fact pEF = makeFact("P", Arrays.asList(e, f)); - final PositiveLiteral qCD = makePositiveLiteral("Q", makeConstant("c"), makeConstant("d")); + final Fact qCD = makeFact("Q", Arrays.asList(makeConstant("c"), makeConstant("d"))); final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(rule); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java index dae2c01a7..b80c368b8 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java @@ -37,8 +37,9 @@ import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import 
org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -92,7 +93,7 @@ public class ConfigureReasonerLogging { makeConjunction(makePositiveLiteral("B", makeVariable("x"), makeVariable("y"))))); /* A(c,d) */ - private static @NonNull PositiveLiteral fact = makePositiveLiteral("A_EDB", makeConstant("c"), makeConstant("d")); + private static Fact fact = Expressions.makeFact("A_EDB", Arrays.asList(makeConstant("c"), makeConstant("d"))); public static void main(final String[] args) throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java index 5181aa25d..b3ede8c41 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java @@ -111,7 +111,7 @@ public static void main(final String[] args) for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { kb.addRules(graalConjunctiveQueryToRule.getRule()); } - kb.addFacts(GraalToVLog4JModelConverter.convertAtoms(graalAtoms)); + kb.addFacts(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); reasoner.load(); System.out.println("Before materialisation:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index f9bb66b90..bbf6ad18c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -128,7 +128,7 @@ public static void main(final String[] args) final VLogKnowledgeBase kb = new VLogKnowledgeBase(); kb.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); kb.addRules(convertedGraalConjunctiveQuery.getRule()); - kb.addFacts(GraalToVLog4JModelConverter.convertAtoms(graalAtoms)); + kb.addFacts(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); try (Reasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index c2fdc33aa..13658c02b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -33,6 +33,7 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import 
org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; @@ -79,7 +80,7 @@ public static void main(final String[] args) throws OWLOntologyCreationException /* Print out Facts extracted from bike ontology */ System.out.println("Facts extracted from Bike ontology:"); - final Set facts = owlToRulesConverter.getFacts(); + final Set facts = owlToRulesConverter.getFacts(); for (final PositiveLiteral fact : facts) { System.out.println(" - fact: " + fact); } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index 3d4525466..b2faf28a8 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -41,6 +41,7 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Variable; @@ -82,7 +83,7 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having * the ternary predicate "TRIPLE". */ - final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToPositiveLiterals(rdfModelISWC2016); + final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2016); System.out.println("Example triple fact from iswc-2016 dataset:"); System.out.println(" - " + tripleFactsISWC2016.iterator().next()); @@ -101,7 +102,7 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having * the ternary predicate "TRIPLE". 
*/ - final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToPositiveLiterals(rdfModelISWC2017); + final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2017); System.out.println("Example triple fact from iswc-2017 dataset:"); System.out.println(" - " + tripleFactsISWC2017.iterator().next()); diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java index f939c48d3..c2c94b28a 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java +++ b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java @@ -25,6 +25,7 @@ import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteral; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePositiveLiteralsRule; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeFact; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makePredicate; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; @@ -35,6 +36,7 @@ import org.apache.commons.lang3.StringUtils; import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -74,6 +76,21 @@ public static PositiveLiteral convertAtom(final fr.lirmm.graphik.graal.api.core. return makePositiveLiteral(predicate, terms); } + /** + * Converts a {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} into a + * {@link Fact VLog4J fact}. + * + * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} + * @return A {@link Fact VLog4J fact} + * @throws IllegalArgumentException if the converted atom contains terms that + * cannot occur in facts + */ + public static Fact convertAtomToFact(final fr.lirmm.graphik.graal.api.core.Atom atom) { + final Predicate predicate = convertPredicate(atom.getPredicate()); + final List terms = convertTerms(atom.getTerms()); + return makeFact(predicate, terms); + } + /** * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Atom Graal * Atoms} into a {@link List} of {@link PositiveLiteral VLog4J @@ -91,6 +108,22 @@ public static List convertAtoms(final List convertAtomsToFacts(final List atoms) { + final List result = new ArrayList<>(); + for (final fr.lirmm.graphik.graal.api.core.Atom atom : atoms) { + result.add(convertAtomToFact(atom)); + } + return result; + } + /** * Converts a {@link AtomSet Graal AtomSet} into a {@link Conjunction VLog4J * Conjunction} of {@link PositiveLiteral}s. 
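
The following sketch illustrates how the Fact-returning Graal conversion introduced above is meant to be used. It is a minimal, illustrative fragment rather than part of the patch: the Graal atoms are assumed to come from elsewhere (for instance a DLGP parser, as in the AddDataFromDlgpFile example touched earlier in this series), and the knowledge base is assumed to still offer the addFacts method visible at this point of the history.

import java.util.List;

import org.semanticweb.vlog4j.core.model.api.Fact;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter;

import fr.lirmm.graphik.graal.api.core.Atom;

public class GraalAtomsToFactsSketch {

	// Converts Graal atoms to VLog4J facts and registers them with the knowledge
	// base. convertAtomsToFacts is the method added in the hunk above; it rejects
	// atoms whose terms cannot occur in facts (e.g. variables) with an
	// IllegalArgumentException.
	static void addGraalAtomsAsFacts(final KnowledgeBase kb, final List<Atom> graalAtoms) {
		final List<Fact> facts = GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms);
		kb.addFacts(facts);
	}
}
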
diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java index 7d9215cff..9aa0cdb72 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java @@ -67,6 +67,7 @@ import org.semanticweb.owlapi.model.SWRLRule; import org.semanticweb.owlapi.util.OWLAxiomVisitorAdapter; import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -74,6 +75,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.FactImpl; import org.semanticweb.vlog4j.core.model.implementation.VariableImpl; /** @@ -87,7 +89,7 @@ public class OwlAxiomToRulesConverter extends OWLAxiomVisitorAdapter implements static OWLDataFactory owlDataFactory = OWLManager.getOWLDataFactory(); final Set rules = new HashSet<>(); - final Set facts = new HashSet<>(); + final Set facts = new HashSet<>(); final Variable frontierVariable = new VariableImpl("X"); int freshVariableCounter = 0; @@ -120,7 +122,7 @@ void addRule(final AbstractClassToRuleConverter converter) { Arrays.asList(OwlToRulesConversionHelper.getTop(converter.mainTerm))); if (headConjunction.getVariables().isEmpty()) { for (final PositiveLiteral conjunct : headConjunction.getLiterals()) { - this.facts.add(conjunct); + this.facts.add(new FactImpl(conjunct.getPredicate(), conjunct.getTerms())); } return; } @@ -260,7 +262,7 @@ public void visit(final OWLObjectPropertyRangeAxiom axiom) { public void visit(final OWLObjectPropertyAssertionAxiom axiom) { final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject()); final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject()); - this.facts.add(OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), subject, object)); + this.facts.add(OwlToRulesConversionHelper.getObjectPropertyFact(axiom.getProperty(), subject, object)); } @Override diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java index b3e563dd3..a7c9ce772 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java @@ -35,11 +35,13 @@ import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; import org.semanticweb.vlog4j.core.model.implementation.ConstantImpl; +import org.semanticweb.vlog4j.core.model.implementation.FactImpl; import 
org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.owlapi.AbstractClassToRuleConverter.SimpleConjunction; @@ -143,6 +145,19 @@ public static PositiveLiteral getObjectPropertyAtom(final OWLObjectPropertyExpre } } + public static Fact getObjectPropertyFact(final OWLObjectPropertyExpression owlObjectPropertyExpression, + final Term sourceTerm, final Term targetTerm) { + if (owlObjectPropertyExpression.isAnonymous()) { + final Predicate predicate = OwlToRulesConversionHelper + .getObjectPropertyPredicate(owlObjectPropertyExpression.getInverseProperty().asOWLObjectProperty()); + return new FactImpl(predicate, Arrays.asList(targetTerm, sourceTerm)); + } else { + final Predicate predicate = OwlToRulesConversionHelper + .getObjectPropertyPredicate(owlObjectPropertyExpression.asOWLObjectProperty()); + return new FactImpl(predicate, Arrays.asList(sourceTerm, targetTerm)); + } + } + public static PositiveLiteral getBottom(final Term term) { final Predicate predicate = new PredicateImpl("http://www.w3.org/2002/07/owl#Nothing", 1); return new PositiveLiteralImpl(predicate, Arrays.asList(term)); diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java index d0c681e79..0850ff809 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java @@ -24,7 +24,7 @@ import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Rule; /** @@ -55,7 +55,7 @@ public void addOntology(final OWLOntology owlOntology) { * * @return set of facts */ - public Set getFacts() { + public Set getFacts() { return this.owlAxiomToRulesConverter.facts; } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index cf1018272..26891374d 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -26,6 +26,7 @@ import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -93,7 +94,7 @@ public List getRules() { return parser.getRules(); } - public List getFacts() { + public List getFacts() { return parser.getFacts(); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index cd9b6bab8..49f1257b7 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -29,6 +29,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; 
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; @@ -140,19 +141,14 @@ DataSource dataSource() throws PrefixDeclarationException: void statement() throws PrefixDeclarationException: { Rule r; - PositiveLiteral l; + Fact l; resetVariableSets(); } { LOOKAHEAD(rule()) r = rule() { rules.add(r);} -| l = positiveLiteral(FormulaContext.HEAD) < DOT > //not from a rule +| l = fact(FormulaContext.HEAD) < DOT > //not from a rule { - if (l.getVariables().isEmpty()) { - facts.add(l); - } else { - throw new ParseException("Facts with variables are not allowed: " + l.toString()); - } - + facts.add(l); } } @@ -225,6 +221,23 @@ PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclaration { return Expressions.makePositiveLiteral(predicateName, terms); } } +Fact fact(FormulaContext context) throws PrefixDeclarationException: +{ + Token t; + List < Term > terms; + String predicateName; +} +{ + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > + { + try { + return Expressions.makeFact(predicateName, terms); + } catch (IllegalArgumentException e) { + throw new ParseException("Error parsing fact: " + e.toString()); + } + } +} + NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException: { List < Term > terms; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 8c80e25bd..39ffdcf18 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -27,7 +27,7 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; @@ -53,7 +53,7 @@ public class JavaCCParserBase { final PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); final List rules = new ArrayList<>(); - final List facts = new ArrayList<>(); + final List facts = new ArrayList<>(); final List dataSourceDaclarations = new ArrayList<>(); /** @@ -222,7 +222,7 @@ public List getRules() { return rules; } - public List getFacts() { + public List getFacts() { return facts; } diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java index 66570a978..d443f169b 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java +++ b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java @@ -22,6 +22,7 @@ import static org.semanticweb.vlog4j.rdf.RdfValueToTermConverter.rdfValueToTerm; +import java.util.Arrays; import java.util.Set; import java.util.stream.Collectors; @@ -34,6 +35,7 @@ import org.openrdf.model.Value; import org.semanticweb.vlog4j.core.model.api.Blank; import 
org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; @@ -45,9 +47,8 @@ * given {@code rdfModel} into an {@link PositiveLiteral} of the form * {@code TRIPLE(subject, predicate, object)}. The ternary predicate used for * all literals generated from RDF triples is - * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE}. Subject, - * predicate and object {@link Value}s are converted to corresponding - * {@link Term}s: + * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE}. Subject, predicate and object + * {@link Value}s are converted to corresponding {@link Term}s: *

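
Analogously, the renamed RDF conversion can be wired into a knowledge base as sketched below. This is an illustrative fragment only: it assumes the converter's input is an org.openrdf Model that has already been parsed (as in the ISWC example above), and it uses only the rdfModelToFacts and addFacts calls shown in these patches.

import java.util.Set;

import org.openrdf.model.Model;
import org.semanticweb.vlog4j.core.model.api.Fact;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.rdf.RdfModelConverter;

public class RdfTriplesToFactsSketch {

	// Converts every triple of the RDF model into a TRIPLE(subject, predicate, object)
	// fact and adds the resulting facts to the knowledge base.
	static void addRdfTriples(final KnowledgeBase kb, final Model rdfModel) {
		final Set<Fact> tripleFacts = RdfModelConverter.rdfModelToFacts(rdfModel);
		kb.addFacts(tripleFacts);
	}
}
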
    - * @throws IOException - * if I/O exceptions occur during reasoning. - * @throws ReasonerStateException - * if this method is called before loading ({@link Reasoner#load()} - * or after closing ({@link Reasoner#close()}). + * @throws IOException if I/O exceptions occur during + * reasoning. + * @throws ReasonerStateException if this method is called before + * loading ({@link Reasoner#load()} + * or after closing + * ({@link Reasoner#close()}). * @throws IncompatiblePredicateArityException * @throws EdbIdbSeparationException */ @@ -466,8 +464,7 @@ QueryResultIterator answerQuery(@NonNull PositiveLiteral query, boolean includeB */ // TODO update javadoc with return type MaterialisationState exportQueryAnswersToCsv(@NonNull PositiveLiteral query, @NonNull String csvFilePath, - boolean includeBlanks) - throws ReasonerStateException, IOException; + boolean includeBlanks) throws ReasonerStateException, IOException; /** * Resets the reasoner to a pre-loading state (before the call of diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 35e1024ec..816cd0de4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -4,7 +4,6 @@ import java.util.Formatter; import java.util.HashSet; import java.util.Map; -import java.util.Observable; import java.util.Set; import org.apache.commons.lang3.Validate; @@ -78,7 +77,7 @@ public class VLogReasoner implements Reasoner { public VLogReasoner(KnowledgeBase knowledgeBase) { super(); this.knowledgeBase = knowledgeBase; - this.knowledgeBase.addObserver(this); + // TODO register as listener to KB } @Override @@ -325,7 +324,7 @@ public void resetReasoner() throws ReasonerStateException { public void close() { this.reasonerState = ReasonerState.AFTER_CLOSING; - this.knowledgeBase.deleteObserver(this); + // TODO unregister as listener of KB this.vLog.stop(); } @@ -441,11 +440,4 @@ public CyclicityResult checkForCycles() throws ReasonerStateException, NotStarte } } - @Override - public void update(Observable o, Object arg) { - // TODO update materialisation state for query answering - // TODO compute KB diff - - } - } From fb7f87922f25f101e5836ecff3dad8400c099feb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 17:32:17 +0200 Subject: [PATCH 0133/1003] Add visitor for Statements --- .../vlog4j/core/model/api/Statement.java | 7 ++++ .../core/model/api/StatementVisitor.java | 36 +++++++++++++++++++ .../DataSourceDeclarationImpl.java | 6 ++++ .../core/model/implementation/FactImpl.java | 6 ++++ .../core/model/implementation/RuleImpl.java | 6 ++++ 5 files changed, 61 insertions(+) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java index 54ea0b3d9..c120c1ef9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java @@ -29,4 +29,11 @@ */ public interface Statement { + /** + * Accept a {@link StatementVisitor} and return its output. 
+ * + * @param statementVisitor the StatementVisitor + * @return output of the visitor + */ + T accept(StatementVisitor statementVisitor); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java new file mode 100644 index 000000000..ef707dc2a --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java @@ -0,0 +1,36 @@ +package org.semanticweb.vlog4j.core.model.api; + +/** + * A visitor for the various types of {@link Statement}s in the data model. + * Should be used to avoid any type casting or {@code instanceof} checks when + * processing statements. + * + * @author Markus Krötzsch + */ +public interface StatementVisitor { + + /** + * Visits a {@link Fact} and returns a result. + * + * @param statement the statement to visit + * @return some result + */ + T visit(Fact statement); + + /** + * Visits a {@link Rule} and returns a result. + * + * @param statement the statement to visit + * @return some result + */ + T visit(Rule statement); + + /** + * Visits a {@link DataSourceDeclaration} and returns a result. + * + * @param statement the statement to visit + * @return some result + */ + T visit(DataSourceDeclaration statement); + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java index 37a5e198e..150475b47 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java @@ -24,6 +24,7 @@ import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; /** * Basic implementation for {@link DataSourceDeclaration}. 
@@ -76,6 +77,11 @@ public boolean equals(final Object obj) { return (this.predicate.equals(other.getPredicate())) && this.dataSource.equals(other.getDataSource()); } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } @Override public String toString() { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index 1d1e9ce7b..a008482d6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -24,6 +24,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.TermType; @@ -42,5 +43,10 @@ public FactImpl(Predicate predicate, List terms) { throw new IllegalArgumentException("Facts cannot contain variables."); } } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java index 8c249f148..6115ebbaa 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java @@ -29,6 +29,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; import org.semanticweb.vlog4j.core.model.api.Variable; /** @@ -115,5 +116,10 @@ public Set getExistentiallyQuantifiedVariables() { public Set getUniversallyQuantifiedVariables() { return this.body.getVariables(); } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } } From 905217f07acd993f3959800c333ba7c9be98faf5 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 17:34:58 +0200 Subject: [PATCH 0134/1003] License header --- .../core/model/api/StatementVisitor.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java index ef707dc2a..fc556f18e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.core.model.api; +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + /** * A visitor for the various types of {@link Statement}s in the data model. * Should be used to avoid any type casting or {@code instanceof} checks when From 98a0308006a85270e2ec2c86ac7ff646a76e7d74 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 20 Aug 2019 23:31:00 +0200 Subject: [PATCH 0135/1003] Advanced KnowledgeBase implementation * removed outdated add methods and avoid them throughout code * added transitionary implementations of previous getters --- .../vlog4j/core/reasoner/KnowledgeBase.java | 331 +++++++++++------- .../vlog4j/core/reasoner/LoggingTest.java | 3 +- .../core/reasoner/ReasonerTimeoutTest.java | 4 +- .../implementation/AddDataSourceTest.java | 62 +--- .../implementation/AnswerQueryTest.java | 22 +- .../ExportQueryAnswersToCsvFileTest.java | 2 +- .../FileDataSourceTestUtils.java | 10 +- .../GeneratedAnonymousIndividualsTest.java | 3 +- .../LoadDataFromCsvFileTest.java | 9 +- .../LoadDataFromMemoryTest.java | 18 +- .../LoadDataFromRdfFileTest.java | 7 +- .../LoadDataFromSparqlQueryTest.java | 30 +- .../implementation/ReasonerStateTest.java | 26 +- .../reasoner/implementation/ReasonerTest.java | 8 +- .../StratifiedNegationTest.java | 12 +- .../vlog4j/examples/CountingTriangles.java | 9 +- .../vlog4j/examples/DoidExample.java | 31 +- .../examples/SimpleReasoningExample.java | 7 +- .../examples/core/AddDataFromCsvFile.java | 24 +- .../examples/core/AddDataFromRdfFile.java | 20 +- .../core/AddDataFromSparqlQueryResults.java | 5 +- .../core/ConfigureReasonerLogging.java | 4 +- .../SkolemVsRestrictedChaseTermination.java | 6 +- .../examples/graal/AddDataFromDlgpFile.java | 6 +- .../examples/graal/AddDataFromGraal.java | 6 +- .../examples/graal/DoidExampleGraal.java | 15 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 4 +- .../examples/rdf/AddDataFromRdfModel.java | 18 +- .../semanticweb/vlog4j/parser/RuleParser.java | 32 +- .../vlog4j/parser/javacc/JavaCCParser.jj | 10 +- .../parser/javacc/JavaCCParserBase.java | 24 +- .../vlog4j/syntax/parser/RuleParserTest.java | 117 ++++--- .../vlog4j/rdf/TestReasonOverRdfFacts.java | 4 +- 33 files changed, 410 insertions(+), 479 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 2054973e8..c6cd573c8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -1,28 +1,26 @@ package org.semanticweb.vlog4j.core.reasoner; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import 
org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; - -import karmaresearch.vlog.Atom; - +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; /*- * #%L @@ -44,191 +42,240 @@ * #L% */ -public class KnowledgeBase{ - - private final List rules = new ArrayList<>(); - private final Map> factsForPredicate = new HashMap<>(); - private final Map dataSourceForPredicate = new HashMap<>(); +/** + * A knowledge base with rules, facts, and declartions for loading data from + * further sources. This is a "syntactic" object in that it represents some + * information that is not relevant for the semantics of reasoning, but that is + * needed to ensure faithful re-serialisation of knowledge bases loaded from + * files (e.g., preserving order). + * + * @author Markus Kroetzsch + * + */ +public class KnowledgeBase { /** - * Adds rules to the knowledge base in the given order. The reasoner may - * rewrite the rules internally according to the set - * {@link RuleRewriteStrategy}. + * Auxiliary class to process {@link Statement}s when added to the knowledge + * base. Returns true if a statement was added successfully. + * + * @author Markus Kroetzsch * - * @param rules non-null rules to be added to the knowledge base for - * reasoning. - * @throws IllegalArgumentException if the {@code rules} literals contain terms - * which are not of type - * {@link TermType#CONSTANT} or - * {@link TermType#VARIABLE}. */ - public void addRules(@NonNull Rule... rules) { - addRules(Arrays.asList(rules)); + class AddStatementVisitor implements StatementVisitor { + @Override + public Boolean visit(Fact statement) { + addFact(statement); + return true; + } + + @Override + public Boolean visit(Rule statement) { + return true; + } + + @Override + public Boolean visit(DataSourceDeclaration statement) { + dataSourceDeclarations.add(statement); + return true; + } + } + + final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); + + class ExtractStatementsVisitor implements StatementVisitor { + + final ArrayList extracted = new ArrayList<>(); + final Class ownType; + + ExtractStatementsVisitor(Class type) { + ownType = type; + } + + ArrayList getExtractedStatements() { + return extracted; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(Fact statement) { + if (ownType.equals(Fact.class)) { + extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(Rule statement) { + if (ownType.equals(Rule.class)) { + extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(DataSourceDeclaration statement) { + if (ownType.equals(DataSourceDeclaration.class)) { + extracted.add((T) statement); + } + return null; + } + } /** - * Adds rules to the knowledge base in the given order. The reasoner may - * rewrite the rules internally according to the set - * {@link RuleRewriteStrategy}. - * - * @param rules non-null rules to be added to the knowledge base for - * reasoning. 
- * @throws IllegalArgumentException if the {@code rules} literals contain terms - * which are not of type - * {@link TermType#CONSTANT} or - * {@link TermType#VARIABLE}. + * The primary storage for the contents of the knowledge base. + */ + final LinkedHashSet statements = new LinkedHashSet<>(); + /** + * Known prefixes that can be used to pretty-print the contents of the knowledge + * base. We try to preserve user-provided prefixes found in files when loading + * data. + */ + PrefixDeclarations prefixDeclarations; + + /** + * Index structure that organises all facts by their predicate. + */ + final Map> factsByPredicate = new HashMap<>(); + /** + * Index structure that holds all data source declarations of this knowledge + * base. */ - public void addRules(@NonNull List rules) { - Validate.noNullElements(rules, "Null rules are not alowed! The list contains a null at position [%d]."); - this.rules.addAll(new ArrayList<>(rules)); + final Set dataSourceDeclarations = new HashSet<>(); - // TODO setChanged - // TODO notify listeners with the diff + /** + * Adds a single statement to the knowledge base. + * + * @param statement + */ + public void addStatement(Statement statement) { + Validate.notNull(statement, "Statement cannot be Null."); + if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { + this.statements.add(statement); + } } /** - * Get the list of all rules that have been added to the reasoner. The list is - * read-only and cannot be modified to add or delete rules. + * Adds a collection of statements to the knowledge base. * - * @return list of {@link Rule} + * @param statements */ - public List getRules() { - return Collections.unmodifiableList(this.rules); + public void addStatements(Collection statements) { + for (Statement statement : statements) { + addStatement(statement); + } } /** - * Adds non-null facts to the knowledge base. A fact is a - * {@link PositiveLiteral} with all terms ({@link PositiveLiteral#getTerms()}) - * of type {@link TermType#CONSTANT}.
    - * Facts predicates ({@link PositiveLiteral#getPredicate()}) cannot have - * multiple data sources. - * - * @param facts facts to be added to the knowledge base. The given order - * is not maintained. - * @throws IllegalArgumentException if the knowledge base contains facts - * from a data source with the same predicate - * ({@link PositiveLiteral#getPredicate()}) as - * a {@link PositiveLiteral} among given - * {@code facts}. - * @throws IllegalArgumentException if the {@code facts} literals contain terms - * which are not of type - * {@link TermType#CONSTANT}. + * Adds a list of statements to the knowledge base. + * + * @param statements */ - public void addFacts(final Fact... facts) { - addFacts(Arrays.asList(facts)); + public void addStatements(Statement... statements) { + for (Statement statement : statements) { + addStatement(statement); + } + } - // TODO setChanged - // TODO notify listeners with the diff + /** + * Get the list of all rules that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete rules. + * + * @return list of {@link Rule}s + */ + public List getRules() { + return getStatementsByType(Rule.class); } /** - * Adds non-null facts to the knowledge base. A fact is a - * {@link PositiveLiteral} with all terms ({@link PositiveLiteral#getTerms()}) - * of type {@link TermType#CONSTANT}.
- * Facts predicates ({@link PositiveLiteral#getPredicate()}) cannot have - * multiple data sources. - * - * @param facts facts to be added to the knowledge base. - * @throws IllegalArgumentException if the knowledge base contains facts - * from a data source with the same predicate - * ({@link PositiveLiteral#getPredicate()}) as - * an {@link PositiveLiteral} among given - * {@code facts}. - * @throws IllegalArgumentException if the {@code facts} literals contain terms - * which are not of type - * {@link TermType#CONSTANT}. + * Get the list of all facts that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete facts. + * + * @return list of {@link Fact}s */ - // TODO add examples to javadoc about multiple sources per predicate and EDB/IDB - public void addFacts(final Collection facts) { - Validate.noNullElements(facts, "Null facts are not alowed! The list contains a fact at position [%d]."); - for (final PositiveLiteral fact : facts) { - validateFactTermsAreConstant(fact); + public List getFacts() { + return getStatementsByType(Fact.class); + } - final Predicate predicate = fact.getPredicate(); - validateNoDataSourceForPredicate(predicate); + /** + * Get the list of all data source declarations that have been added to the + * knowledge base. The list is read-only and cannot be modified to add or delete + * declarations. + * + * @return list of {@link DataSourceDeclaration}s + */ + public List getDataSourceDeclarations() { + return getStatementsByType(DataSourceDeclaration.class); + } - this.factsForPredicate.putIfAbsent(predicate, new HashSet<>()); - this.factsForPredicate.get(predicate).add(fact); + List getStatementsByType(Class type) { + ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); + for (Statement statement : statements) { + statement.accept(visitor); } + return Collections.unmodifiableList(visitor.getExtractedStatements()); } /** - * Adds facts stored in given {@code dataSource} for given {@code predicate} to - * the knowledge base. Facts predicates cannot have multiple data - * sources, including in-memory {@link Atom} objects added trough - * {@link #addFacts}. - * - * @param predicate the {@link Predicate} for which the given - * {@code dataSource} contains fact terms. - * @param dataSource data source containing the fact terms to be associated to - * given predicate and added to the reasoner - * @throws IllegalArgumentException if the knowledge base contains facts - * in memory (added using {@link #addFacts}) or - * from a data source with the same - * {@link Predicate} as given - * {@code predicate}. + * Add a single fact to the internal data structures. It is assumed that it has + * already been checked that this fact is not present yet. + * + * @param fact the fact to add */ - // TODO add example to javadoc with two datasources and with in-memory facts for - // the same predicate. - // TODO validate predicate arity corresponds to the dataSource facts arity - public void addFactsFromDataSource(Predicate predicate, DataSource dataSource) { - Validate.notNull(predicate, "Null predicates are not allowed!"); - Validate.notNull(dataSource, "Null dataSources are not allowed!"); - validateNoDataSourceForPredicate(predicate); - Validate.isTrue(!this.factsForPredicate.containsKey(predicate), - "Multiple data sources for the same predicate are not allowed!
Facts for predicate [%s] alredy added in memory: %s", - predicate, this.factsForPredicate.get(predicate)); - - this.dataSourceForPredicate.put(predicate, dataSource); + void addFact(Fact fact) { + final Predicate predicate = fact.getPredicate(); + this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); + this.factsByPredicate.get(predicate).add(fact); } + @Deprecated public boolean hasFacts() { - return !this.dataSourceForPredicate.isEmpty() || !this.factsForPredicate.isEmpty(); + // If needed, a more elegant implementation should be used + return !this.getFacts().isEmpty() || !this.getDataSourceDeclarations().isEmpty(); } + @Deprecated public Map getDataSourceForPredicate() { - return this.dataSourceForPredicate; + // Only for temporary functionality; the one-source-per-predicate model will be + // retired and is no longer enforced in the knowledge base + Map result = new HashMap<>(); + for (DataSourceDeclaration dsd : getDataSourceDeclarations()) { + result.put(dsd.getPredicate(), dsd.getDataSource()); + } + return result; } + @Deprecated public Map> getFactsForPredicate() { - return this.factsForPredicate; + // Check if this is really the best format to access this data + return this.factsByPredicate; } + @Deprecated public Set getEdbPredicates() { // TODO use cache return collectEdbPredicates(); } + @Deprecated public Set getIdbPredicates() { // TODO use cache return collectIdbPredicates(); } - private void validateFactTermsAreConstant(PositiveLiteral fact) { - final Set nonConstantTerms = new HashSet<>(fact.getTerms()); - nonConstantTerms.removeAll(fact.getConstants()); - Validate.isTrue(nonConstantTerms.isEmpty(), - "Only Constant terms alowed in Fact literals! The following non-constant terms [%s] appear for fact [%s]!", - nonConstantTerms, fact); - - } - - private void validateNoDataSourceForPredicate(final Predicate predicate) { - Validate.isTrue(!this.dataSourceForPredicate.containsKey(predicate), - "Multiple data sources for the same predicate are not allowed! Facts for predicate [%s] alredy added from data source: %s", - predicate, this.dataSourceForPredicate.get(predicate)); - } - - private Set collectEdbPredicates() { + Set collectEdbPredicates() { + // not an efficient or elegant implementation final Set edbPredicates = new HashSet<>(); - edbPredicates.addAll(this.dataSourceForPredicate.keySet()); - edbPredicates.addAll(this.factsForPredicate.keySet()); + edbPredicates.addAll(this.getDataSourceForPredicate().keySet()); + edbPredicates.addAll(this.factsByPredicate.keySet()); return edbPredicates; } - private Set collectIdbPredicates() { + Set collectIdbPredicates() { final Set idbPredicates = new HashSet<>(); - for (final Rule rule : this.rules) { + for (final Rule rule : this.getRules()) { for (final Literal headAtom : rule.getHead()) { idbPredicates.add(headAtom.getPredicate()); } @@ -236,4 +283,16 @@ private Set collectIdbPredicates() { return idbPredicates; } + /** + * Returns all {@link Statement}s of this knowledge base. + * + * The result can be iterated over and will return statements in the original + * order. 
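To illustrate the resulting public interface, here is a minimal usage sketch, assuming the methods introduced in this patch (`addStatement`, `addStatements`, `getRules`, `getFacts`, `getStatements`); the predicate and constant names are illustrative only.

```
import java.util.Arrays;

import org.semanticweb.vlog4j.core.model.api.Fact;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.model.api.Variable;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;

public class KnowledgeBaseUsageSketch {
	public static void main(final String[] args) {
		final Variable x = Expressions.makeVariable("x");
		// q(?x) :- p(?x)
		final Rule rule = Expressions.makeRule(
				Expressions.makePositiveConjunction(Expressions.makePositiveLiteral("q", x)),
				Expressions.makeConjunction(Expressions.makePositiveLiteral("p", x)));
		// p(c)
		final Fact fact = Expressions.makeFact("p", Arrays.asList(Expressions.makeConstant("c")));

		final KnowledgeBase kb = new KnowledgeBase();
		kb.addStatements(rule, fact);
		kb.addStatement(fact); // duplicate: ignored, since statements are kept in a LinkedHashSet

		System.out.println(kb.getRules().size());      // 1
		System.out.println(kb.getFacts().size());      // 1, despite the repeated add
		System.out.println(kb.getStatements().size()); // 2, iterated in insertion order
	}
}
```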
+ * + * @return a collection of statements + */ + public Collection getStatements() { + return this.statements; + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 339fd6eda..161858d51 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -59,8 +59,7 @@ public class LoggingTest { private static final KnowledgeBase kb = new KnowledgeBase(); static { - kb.addRules(rule); - kb.addFacts(factPc); + kb.addStatements(rule, factPc); } // TODO remaining tests: change log file diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index 7fc830cda..6706b5845 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -113,8 +113,8 @@ public static void setUpBeforeClass() { final Rule infinite_rule = makeRule(infinite_IDB_yz, infinite_IDB_xy); rules.add(infinite_rule); - kb.addRules(rules); - kb.addFacts(facts); + kb.addStatements(rules); + kb.addStatements(facts); } @Before diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 4f7077a81..80cf85f90 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -38,6 +38,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -58,9 +59,10 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws Reasone final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFacts(factPredicatePArity2, factPredicateQArity1); - kb.addFactsFromDataSource(predicateLArity1, dataSource); - kb.addFactsFromDataSource(predicateParity1, dataSource); + kb.addStatement(factPredicatePArity2); + kb.addStatement(factPredicateQArity1); + kb.addStatement(new DataSourceDeclarationImpl(predicateLArity1, dataSource)); + kb.addStatement(new DataSourceDeclarationImpl(predicateParity1, dataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -87,8 +89,8 @@ public void testAddDataSourceBeforeLoading() throws ReasonerStateException, EdbI final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - kb.addFactsFromDataSource(predicateP, dataSource); - kb.addFactsFromDataSource(predicateQ, dataSource); + kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); + kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); reasoner.load(); } } @@ -105,9 +107,9 @@ public void testAddDataSourceAfterLoading() throws 
ReasonerStateException, EdbId final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - kb.addFactsFromDataSource(predicateP, dataSource); + kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); reasoner.load(); - kb.addFactsFromDataSource(predicateQ, dataSource); + kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); } } @@ -123,53 +125,11 @@ public void testAddDataSourceAfterReasoning() throws ReasonerStateException, Edb final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - kb.addFactsFromDataSource(predicateP, dataSource); + kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); reasoner.load(); reasoner.reason(); - kb.addFactsFromDataSource(predicateQ, dataSource); + kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); } } - // TODO move to a test class for KnowledgeBase - @Test(expected = IllegalArgumentException.class) - public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws ReasonerStateException, IOException { - final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - - final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(predicate, dataSource); - kb.addFactsFromDataSource(predicate, dataSource); - } - - // TODO move to a test class for KnowledgeBase - @Test(expected = IllegalArgumentException.class) - public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException, IOException { - final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1), - Arrays.asList(Expressions.makeConstant("a"))); - - final KnowledgeBase kb = new KnowledgeBase(); - kb.addFacts(fact); - kb.addFactsFromDataSource(predicate, dataSource); - } - - // TODO move to a test class for KnowledgeBase - @Test(expected = NullPointerException.class) - public void testAddDataSourcePredicateNotNull() throws ReasonerStateException, IOException { - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - - final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(null, dataSource); - } - - // TODO move to a test class for KnowledgeBase - @Test(expected = NullPointerException.class) - public void testAddDataSourceNotNullDataSource() throws ReasonerStateException { - final Predicate predicate = Expressions.makePredicate("p", 1); - - final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(predicate, null); - } - } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index 5c2843d45..ecd23c64e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -72,7 +72,7 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() try (final Reasoner reasoner = Reasoner.getInstance()) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); - kb.addFacts(fact); + kb.addStatement(fact); reasoner.load(); final PositiveLiteral queryAtomXYZ = Expressions.makePositiveLiteral(predicate, x, y, z); 
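The tests above replace `addFactsFromDataSource` with `DataSourceDeclarationImpl` statements and hand the knowledge base to a `VLogReasoner`. The following sketch summarises that workflow, assuming the API as used in these tests; the CSV path is a placeholder, the import location of `QueryResultIterator` follows the conventions of vlog4j-core, and the generic `throws Exception` stands in for the specific vlog4j exceptions declared in the tests.

```
import java.io.File;
import java.util.Arrays;

import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Predicate;
import org.semanticweb.vlog4j.core.model.api.Variable;
import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator;
import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class ReasonerWorkflowSketch {
	public static void main(final String[] args) throws Exception {
		final Predicate p = Expressions.makePredicate("p", 1);
		final Predicate q = Expressions.makePredicate("q", 1);
		final Predicate r = Expressions.makePredicate("r", 1);
		final Variable x = Expressions.makeVariable("x");

		final KnowledgeBase kb = new KnowledgeBase();
		// q(?x) :- p(?x), plus the in-memory fact p(c)
		kb.addStatements(
				Expressions.makeRule(Expressions.makePositiveConjunction(Expressions.makePositiveLiteral(q, x)),
						Expressions.makeConjunction(Expressions.makePositiveLiteral(p, x))),
				Expressions.makeFact(p, Arrays.asList(Expressions.makeConstant("c"))));
		// facts for r are loaded from a CSV file; the path is a placeholder
		kb.addStatement(new DataSourceDeclarationImpl(r, new CsvFileDataSource(new File("input/r.csv"))));

		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
			reasoner.load();
			reasoner.reason();
			final PositiveLiteral query = Expressions.makePositiveLiteral(q, x);
			try (final QueryResultIterator answers = reasoner.answerQuery(query, true)) {
				while (answers.hasNext()) {
					System.out.println(answers.next());
				}
			}
		}
	}
}
```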
@@ -112,11 +112,11 @@ public void testIDBQuerySameBlankSubstitutesSameVariableName() final Rule pX__pYY_pYZ = Expressions.makeRule(Expressions.makePositiveConjunction(pYY, pYZ), Expressions.makeConjunction(Expressions.makePositiveLiteral(predicate, x))); assertEquals(Sets.newSet(y, z), pX__pYY_pYZ.getExistentiallyQuantifiedVariables()); - + final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(pX__pYY_pYZ); - kb.addFacts(Expressions.makeFact(predicate, Arrays.asList(Expressions.makeConstant("c")))); + kb.addStatements(pX__pYY_pYZ); + kb.addStatement(Expressions.makeFact(predicate, Arrays.asList(Expressions.makeConstant("c")))); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); @@ -160,9 +160,8 @@ public void testIDBQuerySameIndividualSubstitutesSameVariableName() final Fact factPcd = Expressions.makeFact(predicate, Arrays.asList(constantC, constantD)); final KnowledgeBase kb = new KnowledgeBase(); - - kb.addRules(pXY__pXYYZZT); - kb.addFacts(factPcd); + + kb.addStatements(pXY__pXYYZZT, factPcd); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -230,8 +229,7 @@ public void queryResultWithBlanks() final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("q", Expressions.makeVariable("?x")); final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(existentialRule); - kb.addFacts(fact); + kb.addStatements(existentialRule, fact); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -255,7 +253,7 @@ public void queryResultWithBlanks() @Test public void queryEmptyKnowledgeBase() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - final KnowledgeBase kb = new KnowledgeBase(); + final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -278,7 +276,7 @@ public void queryEmptyRules() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { final KnowledgeBase kb = new KnowledgeBase(); final Fact fact = Expressions.makeFact("P", Arrays.asList(Expressions.makeConstant("c"))); - kb.addFacts(fact); + kb.addStatement(fact); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -304,7 +302,7 @@ public void queryEmptyFacts() throws EDBConfigurationException, IOException, Edb Expressions.makePositiveLiteral("p", vx)); final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(rule); + kb.addStatement(rule); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java index 2b281417b..189c2748d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java @@ -57,7 +57,7 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() final KnowledgeBase kb = new KnowledgeBase(); - kb.addFacts(fact); + kb.addStatement(fact); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java index 7c9580b02..ab02fdac0 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -38,6 +38,7 @@ import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -94,8 +95,7 @@ public static List> getCSVContent(final String csvFile) throws IOEx } public static void testConstructor(final FileDataSource fileDataSource, final File expectedFile, - final String expectedDirCanonicalPath, final String expectedFileNameWithoutExtension) - throws IOException { + final String expectedDirCanonicalPath, final String expectedFileNameWithoutExtension) throws IOException { assertEquals(expectedFile, fileDataSource.getFile()); assertEquals(expectedDirCanonicalPath, fileDataSource.getDirCanonicalPath()); assertEquals(expectedFileNameWithoutExtension, fileDataSource.getFileNameWithoutExtension()); @@ -103,10 +103,10 @@ public static void testConstructor(final FileDataSource fileDataSource, final Fi public static void testLoadEmptyFile(final Predicate predicate, final PositiveLiteral queryAtom, final FileDataSource emptyFileDataSource) - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { - + throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(predicate, emptyFileDataSource); + kb.addStatement(new DataSourceDeclarationImpl(predicate, emptyFileDataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 8f6d3a356..402568bb0 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -73,8 +73,7 @@ public class GeneratedAnonymousIndividualsTest { // y,z existential variables that can introduce blanks (anonymous individuals) assertEquals(Sets.newSet(vy, vz), existentialRule.getExistentiallyQuantifiedVariables()); - kb.addRules(existentialRule); - kb.addFacts(fact); + kb.addStatements(existentialRule, fact); } @Test diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java index f9a31cb0e..9b0f5af85 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java @@ 
-39,6 +39,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -77,8 +78,8 @@ private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource file throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(unaryPredicate1, fileDataSource); - kb.addFactsFromDataSource(unaryPredicate2, fileDataSource); + kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource)); + kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate2, fileDataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -111,7 +112,7 @@ public void testLoadNonexistingCsvFile() assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new CsvFileDataSource(nonexistingFile); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(unaryPredicate1, fileDataSource); + kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -124,7 +125,7 @@ public void testLoadCsvFileWrongArity() final FileDataSource fileDataSource = new CsvFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv")); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(unaryPredicate1, fileDataSource); + kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java index 8405b1400..92ebbab92 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java @@ -49,8 +49,7 @@ public void loadEdbIdbNotSeparated() throws EDBConfigurationException, IOExcepti final Fact factEDBpredQ2 = Expressions.makeFact("q", Arrays.asList(Expressions.makeConstant("d"), Expressions.makeConstant("d"))); final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(rule); - kb.addFacts(factIDBpredQ1, factEDBpredQ2); + kb.addStatements(rule, factIDBpredQ1, factEDBpredQ2); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -67,8 +66,7 @@ public void loadEdbIdbSeparated() throws EDBConfigurationException, IOException, final Fact factEDBpred = Expressions.makeFact("q", Arrays.asList(Expressions.makeConstant("d"), Expressions.makeConstant("d"))); final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(rule); - kb.addFacts(factEDBpred); + kb.addStatements(rule, factEDBpred); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -82,17 +80,7 @@ public void addFactsWithVariableTerms() throws ReasonerStateException { final Fact factWithVariableTerms = Expressions.makeFact("q", 
Arrays.asList(Expressions.makeConstant("d"), Expressions.makeVariable("x"))); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFacts(factWithVariableTerms); - } - - // TODO move to a test class for KnowledgeBase - @Test(expected = IllegalArgumentException.class) - public void addFactsWithBlankTerms() throws ReasonerStateException { - - final Fact factWithBlankTerms = Expressions.makeFact("q", Arrays.asList(Expressions.makeConstant("d"), - new BlankImpl("b"))); - final KnowledgeBase kb = new KnowledgeBase(); - kb.addFacts(factWithBlankTerms); + kb.addStatement(factWithVariableTerms); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java index 752c05bcc..dd2a510c0 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java @@ -40,6 +40,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -90,7 +91,7 @@ public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fil throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(ternaryPredicate, fileDataSource); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -109,7 +110,7 @@ public void testLoadNonexistingRdfFile() assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(ternaryPredicate, fileDataSource); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -122,7 +123,7 @@ public void testLoadRdfInvalidFormat() final FileDataSource fileDataSource = new RdfFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt")); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(ternaryPredicate, fileDataSource); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java index 839f0044c..388f07d4f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java @@ -36,10 +36,10 @@ import org.semanticweb.vlog4j.core.model.api.Predicate; import 
org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; - public class LoadDataFromSparqlQueryTest { /** @@ -62,12 +62,12 @@ public void testSimpleSparqlQuery() "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(fatherOfPredicate, dataSource); + kb.addStatement(new DataSourceDeclarationImpl(fatherOfPredicate, dataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, - Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( + fatherOfPredicate, Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { assertTrue(answerQuery.hasNext()); final QueryResult firstAnswer = answerQuery.next(); @@ -88,12 +88,12 @@ public void testSimpleSparqlQueryHttps() "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(fatherOfPredicate, dataSource); + kb.addStatement(new DataSourceDeclarationImpl(fatherOfPredicate, dataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, - Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( + fatherOfPredicate, Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { assertTrue(answerQuery.hasNext()); final QueryResult firstAnswer = answerQuery.next(); @@ -122,12 +122,12 @@ public void testSimpleSparqlQuery2() "?a wdt:P22 ?b ."); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(fatherOfPredicate, dataSource); + kb.addStatement(new DataSourceDeclarationImpl(fatherOfPredicate, dataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, - Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( + fatherOfPredicate, Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { assertTrue(answerQuery.hasNext()); } @@ -146,7 +146,7 @@ public void testConjunctiveQueryNewLineCharacterInQueryBody() "?b wdt:P22 ?a .\n" + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(haveChildrenTogether, dataSource); + kb.addStatement(new DataSourceDeclarationImpl(haveChildrenTogether, dataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -167,12 +167,12 @@ public void testConjunctiveQuery() "?b wdt:P22 
?a ." + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(haveChildrenTogether, dataSource); + kb.addStatement(new DataSourceDeclarationImpl(haveChildrenTogether, dataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, - Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( + haveChildrenTogether, Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { assertTrue(answerQuery.hasNext()); } @@ -190,7 +190,7 @@ public void testDataSourcePredicateDoesNotMatchSparqlQueryTerms() // b has father a and b has mother c "?b wdt:P22 ?a ." + "?b wdt:P25 ?c"); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFactsFromDataSource(Expressions.makePredicate("ternary", 3), dataSource); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("ternary", 3), dataSource)); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index f7f8df188..c88cb7fe4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -44,6 +44,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -85,7 +86,7 @@ public void testSetReasoningTimeout() { public void testAddRules1() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.getKnowledgeBase().addRules(ruleQxPx); + reasoner.getKnowledgeBase().addStatement(ruleQxPx); reasoner.load(); } } @@ -94,20 +95,20 @@ public void testAddRules1() public void testAddRules2() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(ruleQxPx); + kb.addStatement(ruleQxPx); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.resetReasoner(); } } - @Test(expected = IllegalArgumentException.class) + @Test(expected = NullPointerException.class) public void testAddRules3() { final KnowledgeBase kb = new KnowledgeBase(); final List rules = new ArrayList<>(); rules.add(ruleQxPx); rules.add(null); - kb.addRules(rules); + kb.addStatements(rules); } // FIXME update test @@ -117,13 +118,13 @@ public void testAddFacts1() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addFacts(factPc); + kb.addStatement(factPc); try (final 
VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); } } - @Test(expected = IllegalArgumentException.class) + @Test(expected = NullPointerException.class) public void testAddFacts2() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { @@ -131,7 +132,7 @@ public void testAddFacts2() final List facts = new ArrayList<>(); facts.add(factPc); facts.add(null); - kb.addFacts(facts); + kb.addStatements(facts); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -175,8 +176,7 @@ public void setRuleRewriteStrategy3() public void testResetDiscardInferences() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(ruleQxPx); - kb.addFacts(factPc); + kb.addStatements(ruleQxPx, factPc); for (final Algorithm algorithm : Algorithm.values()) { // discard inferences regardless of the inference algorithm @@ -212,13 +212,13 @@ public void testResetDiscardInferences() public void testResetKeepExplicitDatabase() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(ruleQxPx); + kb.addStatement(ruleQxPx); // assert p(c) - kb.addFacts(factPc); + kb.addStatement(factPc); // assert r(d) final Predicate predicateR1 = Expressions.makePredicate("r", 1); - kb.addFactsFromDataSource(predicateR1, - new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER, "constantD.csv"))); + kb.addStatement(new DataSourceDeclarationImpl(predicateR1, + new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER, "constantD.csv")))); // p(?x) -> q(?x) try (final VLogReasoner reasoner = new VLogReasoner(kb)) { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java index 5c6b0b1b4..1067715ef 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java @@ -79,11 +79,10 @@ public void testCloseRepeatedly() public void testLoadRules() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(ruleBxAx, ruleCxBx); - kb.addRules(ruleBxAx); + kb.addStatements(ruleBxAx, ruleCxBx, ruleBxAx); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - assertEquals(kb.getRules(), Arrays.asList(ruleBxAx, ruleCxBx, ruleBxAx)); + assertEquals(Arrays.asList(ruleBxAx, ruleCxBx), kb.getRules()); } } @@ -91,8 +90,7 @@ public void testLoadRules() public void testSimpleInference() throws EDBConfigurationException, IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(ruleBxAx, ruleCxBx); - kb.addFacts(factAc, factAd); + kb.addStatements(ruleBxAx, ruleCxBx, factAc, factAd); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java index 78ac9d8a7..88809c236 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java @@ -63,8 +63,7 @@ public void testNotStratifiableEdbIdbSeparation() final Fact fact = makeFact("Q", Arrays.asList(makeConstant("c"), makeConstant("d"))); final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(rule); - kb.addFacts(fact); + kb.addStatements(rule, fact); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -86,8 +85,7 @@ public void testNotStratifiable() final Fact fact = makeFact("P", Arrays.asList(makeConstant("c"), makeConstant("d"))); final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(rule); - kb.addFacts(fact); + kb.addStatements(rule, fact); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -116,8 +114,7 @@ public void testStratifiable() final Fact qCD = makeFact("Q", Arrays.asList(makeConstant("c"), makeConstant("d"))); final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(rule); - kb.addFacts(pCD, pEF, qCD); + kb.addStatements(rule, pCD, pEF, qCD); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -152,8 +149,7 @@ public void testInputNegation() final Fact qCD = makeFact("Q", Arrays.asList(makeConstant("c"), makeConstant("d"))); final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(rule); - kb.addFacts(pCD, pEF, qCD); + kb.addStatements(rule, pCD, pEF, qCD); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 5e7847d02..6883d6a0d 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -25,7 +25,6 @@ import java.io.IOException; import org.semanticweb.vlog4j.core.exceptions.VLog4jException; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; @@ -45,19 +44,15 @@ public class CountingTriangles { public static void main(final String[] args) throws IOException { ExamplesUtils.configureLogging(); - final KnowledgeBase kb = new KnowledgeBase(); + KnowledgeBase kb; /* Configure rules */ final RuleParser ruleParser = new RuleParser(); try { - ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "counting-triangles.rls")); + kb = ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "counting-triangles.rls")); } catch (final ParsingException e) { System.out.println("Failed to parse rules: " + e.getMessage()); return; } - for (final DataSourceDeclaration dataSourceDeclaration : ruleParser.getDataSourceDeclartions()) { - kb.addFactsFromDataSource(dataSourceDeclaration.getPredicate(), dataSourceDeclaration.getDataSource()); - } - kb.addRules(ruleParser.getRules()); System.out.println("Rules used in this example:"); kb.getRules().forEach(System.out::println); System.out.println(""); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index beaba303d..73a7c36c5 
100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -26,7 +26,6 @@ import java.util.List; import org.semanticweb.vlog4j.core.exceptions.VLog4jException; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; @@ -51,29 +50,23 @@ public class DoidExample { public static void main(final String[] args) throws IOException { ExamplesUtils.configureLogging(); - final KnowledgeBase kb = new KnowledgeBase(); + /* Configure rules */ + KnowledgeBase kb; + final RuleParser ruleParser = new RuleParser(); + try { + kb = ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/doid.rls")); + } catch (final ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } + System.out.println("Rules used in this example:"); + kb.getRules().forEach(System.out::println); + System.out.println(""); try (Reasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); reasoner.setLogLevel(LogLevel.DEBUG); - /* Configure rules */ - final RuleParser ruleParser = new RuleParser(); - try { - ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/doid.rls")); - } catch (final ParsingException e) { - System.out.println("Failed to parse rules: " + e.getMessage()); - return; - } - - for (final DataSourceDeclaration dataSourceDeclaration : ruleParser.getDataSourceDeclartions()) { - kb.addFactsFromDataSource(dataSourceDeclaration.getPredicate(), dataSourceDeclaration.getDataSource()); - } - kb.addRules(ruleParser.getRules()); - System.out.println("Rules used in this example:"); - kb.getRules().forEach(System.out::println); - System.out.println(""); - /* Initialise reasoner and compute inferences */ System.out.print("Initialising rules and data sources ... "); reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 55b34e6ab..d78c1ee33 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -70,17 +70,14 @@ public static void main(final String[] args) throws IOException { System.out.println("Knowledge base used in this example:\n\n" + rules); final RuleParser ruleParser = new RuleParser(); + KnowledgeBase kb; try { - ruleParser.parse(rules); + kb = ruleParser.parse(rules); } catch (final ParsingException e) { System.out.println("Failed to parse rules: " + e.getMessage()); return; } - final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(ruleParser.getRules()); - kb.addFacts(ruleParser.getFacts()); - try (final Reasoner reasoner = new VLogReasoner(kb)) { System.out.print("Loading rules and facts ... 
"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 225e8216c..c82d67fcd 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -25,11 +25,11 @@ import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -77,21 +77,13 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I + "isPartOfIDB(?X, ?Y) :- hasPartIDB(?Y, ?X) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(rules); + final KnowledgeBase kb = ruleParser.parse(rules); - try (final Reasoner reasoner = Reasoner.getInstance()) { - - final KnowledgeBase kb = reasoner.getKnowledgeBase(); - /* 1. Add data to Knowledge Base. */ - kb.addRules(ruleParser.getRules()); - for (DataSourceDeclaration dataSourceDeclaration : ruleParser.getDataSourceDeclartions()) { - kb.addFactsFromDataSource(dataSourceDeclaration.getPredicate(), dataSourceDeclaration.getDataSource()); - } - - /* - * 2. Loading, reasoning, and querying while using try-with-resources to close - * the reasoner automatically. - */ + /* + * Loading, reasoning, and querying while using try-with-resources to close the + * reasoner automatically. + */ + try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); System.out.println("Before materialisation:"); @@ -103,7 +95,7 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I final PositiveLiteral hasPartIdbXY = ruleParser.parsePositiveLiteral("hasPartIDB(?X, ?Y)"); ExamplesUtils.printOutQueryAnswers(hasPartIdbXY, reasoner); - /* 3. Exporting query answers to {@code .csv} files. */ + /* Exporting query answers to {@code .csv} files. 
*/ reasoner.exportQueryAnswersToCsv(hasPartIdbXY, ExamplesUtils.OUTPUT_FOLDER + "hasPartIDBXYWithBlanks.csv", true); reasoner.exportQueryAnswersToCsv(hasPartIdbXY, diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 50de20395..099e83a18 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -25,12 +25,12 @@ import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -83,19 +83,13 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I + "triplesIDB(?S, ex:hasPart, ?O) :- triplesIDB(?O, ex:isPartOf, ?S) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(rules); - - try (final Reasoner reasoner = Reasoner.getInstance()) { - /* - * 2. Loading, reasoning, querying and exporting, while using try-with-resources - * to close the reasoner automatically. - */ - final KnowledgeBase kb = reasoner.getKnowledgeBase(); - kb.addRules(ruleParser.getRules()); - for (DataSourceDeclaration dataSourceDeclaration : ruleParser.getDataSourceDeclartions()) { - kb.addFactsFromDataSource(dataSourceDeclaration.getPredicate(), dataSourceDeclaration.getDataSource()); - } + final KnowledgeBase kb = ruleParser.parse(rules); + /* + * 2. Loading, reasoning, querying and exporting, while using try-with-resources + * to close the reasoner automatically. + */ + try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); System.out.println("Before materialisation:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index f664aa476..180931863 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -36,6 +36,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -131,7 +132,7 @@ public static void main(final String[] args) * facts associated to the predicate publicationParents. 
*/ - kb.addFactsFromDataSource(queryPredicate, sparqlQueryResultDataSource); + kb.addStatement(new DataSourceDeclarationImpl(queryPredicate, sparqlQueryResultDataSource)); reasoner.load(); /* @@ -172,7 +173,7 @@ public static void main(final String[] args) */ reasoner.resetReasoner(); - kb.addRules(rule); + kb.addStatement(rule); reasoner.load(); reasoner.reason(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java index b80c368b8..247b2007e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java @@ -100,8 +100,8 @@ public static void main(final String[] args) try (final Reasoner reasoner = Reasoner.getInstance()) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); - kb.addRules(rules); - kb.addFacts(fact); + kb.addStatements(rules); + kb.addStatement(fact); /* * Default reasoner log level is WARNING. diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 31a6c14c5..3499bc0b6 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -71,16 +71,12 @@ public static void main(final String[] args) throws ReasonerStateException, EdbI + "isPartOfIDB(?X, ?Y) :- hasPartIDB(?Y, ?X) ."; final RuleParser ruleParser = new RuleParser(); - ruleParser.parse(rules); + final KnowledgeBase kb = ruleParser.parse(rules); /* * 2. Loading, reasoning, and querying. Use try-with resources, or remember to * call close() to free the reasoner resources. 
*/ - final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(ruleParser.getRules()); - kb.addFacts(ruleParser.getFacts()); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java index b3ede8c41..dc770022f 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java @@ -107,11 +107,11 @@ public static void main(final String[] args) try (Reasoner reasoner = Reasoner.getInstance()) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); - kb.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); + kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { - kb.addRules(graalConjunctiveQueryToRule.getRule()); + kb.addStatement(graalConjunctiveQueryToRule.getRule()); } - kb.addFacts(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); + kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); reasoner.load(); System.out.println("Before materialisation:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index 6eec78e42..4e9d31111 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -126,9 +126,9 @@ public static void main(final String[] args) * the reasoner automatically. 
*/ final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(GraalToVLog4JModelConverter.convertRules(graalRules)); - kb.addRules(convertedGraalConjunctiveQuery.getRule()); - kb.addFacts(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); + kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); + kb.addStatements(convertedGraalConjunctiveQuery.getRule()); + kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); try (Reasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 77b767e74..5c336ecd4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -76,7 +77,7 @@ public static void main(final String[] args) final Predicate doidTriplePredicate = makePredicate("doidTriple", 3); final DataSource doidDataSource = new RdfFileDataSource( new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); - kb.addFactsFromDataSource(doidTriplePredicate, doidDataSource); + kb.addStatement(new DataSourceDeclarationImpl(doidTriplePredicate, doidDataSource)); /* Configure SPARQL data sources */ final String sparqlHumansWithDisease = "?disease wdt:P699 ?doid ."; @@ -84,21 +85,21 @@ public static void main(final String[] args) final DataSource diseasesDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "disease,doid", sparqlHumansWithDisease); final Predicate diseaseIdPredicate = Expressions.makePredicate("diseaseId", 2); - kb.addFactsFromDataSource(diseaseIdPredicate, diseasesDataSource); + kb.addStatement(new DataSourceDeclarationImpl(diseaseIdPredicate, diseasesDataSource)); final String sparqlRecentDeaths = "?human wdt:P31 wd:Q5; wdt:P570 ?deathDate . FILTER (YEAR(?deathDate) = 2018)"; // (wdt:P31 = "instance of"; wd:Q5 = "human", wdt:570 = "date of death") final DataSource recentDeathsDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "human", sparqlRecentDeaths); final Predicate recentDeathsPredicate = Expressions.makePredicate("recentDeaths", 1); - kb.addFactsFromDataSource(recentDeathsPredicate, recentDeathsDataSource); + kb.addStatement(new DataSourceDeclarationImpl(recentDeathsPredicate, recentDeathsDataSource)); final String sparqlRecentDeathsCause = sparqlRecentDeaths + "?human wdt:P509 ?causeOfDeath . 
"; // (wdt:P509 = "cause of death") final DataSource recentDeathsCauseDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "human,causeOfDeath", sparqlRecentDeathsCause); final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); - kb.addFactsFromDataSource(recentDeathsCausePredicate, recentDeathsCauseDataSource); + kb.addStatement(new DataSourceDeclarationImpl(recentDeathsCausePredicate, recentDeathsCauseDataSource)); /* Load rules from DLGP file */ try (final DlgpParser parser = new DlgpParser( @@ -106,7 +107,7 @@ public static void main(final String[] args) while (parser.hasNext()) { final Object object = parser.next(); if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { - kb.addRules( + kb.addStatement( GraalToVLog4JModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); } } @@ -122,11 +123,11 @@ public static void main(final String[] args) final PositiveLiteral deathCause = Expressions.makePositiveLiteral("deathCause", x, y); final PositiveLiteral humansWhoDiedOfNoncancer = Expressions.makePositiveLiteral("humansWhoDiedOfNoncancer", x); - kb.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), + kb.addStatement(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), Expressions.makeConjunction(deathCause, diseaseId, notCancerDisease))); // humansWhoDiedOfNoncancer(X) :- deathCause(X,Y), ~hasDoid(Y) final NegativeLiteral hasNotDoid = Expressions.makeNegativeLiteral("hasDoid", y); - kb.addRules(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), + kb.addStatement(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), Expressions.makeConjunction(deathCause, hasNotDoid))); System.out.println("Rules configured:\n--"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 6f6827921..7beb9f6fb 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -87,8 +87,8 @@ public static void main(final String[] args) throws OWLOntologyCreationException System.out.println(); final KnowledgeBase kb = new KnowledgeBase(); - kb.addRules(new ArrayList<>(owlToRulesConverter.getRules())); - kb.addFacts(owlToRulesConverter.getFacts()); + kb.addStatements(new ArrayList<>(owlToRulesConverter.getRules())); + kb.addStatements(owlToRulesConverter.getFacts()); try (VLogReasoner reasoner = new VLogReasoner(kb)) { /* Load rules and facts obtained from the ontology */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index d9799dd8d..9dcb32d00 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -124,26 +124,16 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti + "hasOrganizationName(?Person, ?OrgName) :- " + " TRIPLE(?Person, cnf:hasAffiliation, ?Aff), TRIPLE(?Aff, cnf:withOrganisation, ?Org)," + " TRIPLE(?Org, cnf:name, ?OrgName) ."; + KnowledgeBase kb; 
final RuleParser ruleParser = new RuleParser(); try { - ruleParser.parse(rules); + kb = ruleParser.parse(rules); } catch (final ParsingException e) { System.out.println("Failed to parse rules: " + e.getMessage()); return; } - - final KnowledgeBase kb = new KnowledgeBase(); - /* - * The rule that maps people to their organization name based on facts extracted - * from RDF triples is added to the Reasoner's knowledge base. - */ - kb.addRules(ruleParser.getRules()); - /* - * Facts extracted from the RDF resources are added to the Reasoner's knowledge - * base. - */ - kb.addFacts(tripleFactsISWC2016); - kb.addFacts(tripleFactsISWC2017); + kb.addStatements(tripleFactsISWC2016); + kb.addStatements(tripleFactsISWC2017); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 26891374d..469b6e4e3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -22,14 +22,11 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; -import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.ParseException; import org.semanticweb.vlog4j.parser.javacc.TokenMgrError; @@ -48,18 +45,18 @@ public class RuleParser { JavaCCParser parser; - public void parse(InputStream stream, String encoding) throws ParsingException { + public KnowledgeBase parse(InputStream stream, String encoding) throws ParsingException { parser = new JavaCCParser(stream, encoding); - doParse(); + return doParse(); } - public void parse(InputStream stream) throws ParsingException { - parse(stream, "UTF-8"); + public KnowledgeBase parse(InputStream stream) throws ParsingException { + return parse(stream, "UTF-8"); } - public void parse(String input) throws ParsingException { + public KnowledgeBase parse(String input) throws ParsingException { InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parse(inputStream, "UTF-8"); + return parse(inputStream, "UTF-8"); } public Literal parseLiteral(String input) throws ParsingException { @@ -82,24 +79,13 @@ public PositiveLiteral parsePositiveLiteral(String input) throws ParsingExceptio } } - void doParse() throws ParsingException { + KnowledgeBase doParse() throws ParsingException { try { parser.parse(); + return parser.getKnowledgeBase(); } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { throw new ParsingException(e.getMessage(), e); } } - public List getRules() { - return parser.getRules(); - } - - public List getFacts() { - return parser.getFacts(); - } - - public List getDataSourceDeclartions() { - return parser.getDataSourceDeclartions(); - } - } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 49f1257b7..cee0522f3 100644 --- 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -25,6 +25,7 @@ import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; @@ -140,15 +141,14 @@ DataSource dataSource() throws PrefixDeclarationException: void statement() throws PrefixDeclarationException: { - Rule r; - Fact l; + Statement statement; resetVariableSets(); } { - LOOKAHEAD(rule()) r = rule() { rules.add(r);} -| l = fact(FormulaContext.HEAD) < DOT > //not from a rule + LOOKAHEAD(rule()) statement = rule() { knowledgeBase.addStatement(statement);} +| statement = fact(FormulaContext.HEAD) < DOT > //not from a rule { - facts.add(l); + knowledgeBase.addStatement(statement); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 39ffdcf18..52e144235 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -20,18 +20,14 @@ * #L% */ -import java.util.List; -import java.util.ArrayList; import java.util.HashSet; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; -import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -52,9 +48,7 @@ public class JavaCCParserBase { final PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); - final List rules = new ArrayList<>(); - final List facts = new ArrayList<>(); - final List dataSourceDaclarations = new ArrayList<>(); + final KnowledgeBase knowledgeBase = new KnowledgeBase(); /** * "Local" variable to remember (universal) body variables during parsing. 
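Seen from the caller's side, the parser now returns a ready-made KnowledgeBase instead of exposing separate getters for rules, facts and data source declarations, and the grammar feeds each parsed statement straight into that knowledge base. A minimal usage sketch under that assumption follows; the fact and rule text is made up for illustration.

    import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
    import org.semanticweb.vlog4j.parser.ParsingException;
    import org.semanticweb.vlog4j.parser.RuleParser;

    public class ParseIntoKnowledgeBase {

        public static void main(final String[] args) {
            final String text = "parent(alice, bob) . ancestor(?X, ?Y) :- parent(?X, ?Y) .";
            final RuleParser ruleParser = new RuleParser(); // still instance-based at this point
            try {
                final KnowledgeBase kb = ruleParser.parse(text);
                // Facts and rules are now retrieved as statements of the knowledge base:
                System.out.println("Parsed " + kb.getStatements().size() + " statements.");
            } catch (final ParsingException e) {
                System.out.println("Failed to parse rules: " + e.getMessage());
            }
        }
    }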
@@ -102,7 +96,7 @@ Constant createDoubleLiteral(String lexicalForm) { void addDataSource(String predicateName, int arity, DataSource dataSource) { Predicate predicate = Expressions.makePredicate(predicateName, arity); - dataSourceDaclarations.add(new DataSourceDeclarationImpl(predicate, dataSource)); + knowledgeBase.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } static String unescapeStr(String s, int line, int column) throws ParseException { @@ -218,16 +212,8 @@ void resetVariableSets() { this.headUniVars.clear(); } - public List getRules() { - return rules; - } - - public List getFacts() { - return facts; - } - - public List getDataSourceDeclartions() { - return dataSourceDaclarations; + public KnowledgeBase getKnowledgeBase() { + return knowledgeBase; } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index c78808e7f..4ff4e5fc2 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -26,17 +26,21 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; +import java.util.ArrayList; import java.util.Arrays; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; @@ -69,41 +73,41 @@ public class RuleParserTest { public void testExplicitIri() throws ParsingException { String input = "() ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(fact), statements); } @Test public void testPrefixResolution() throws ParsingException { String input = "@prefix ex: . ex:s(ex:c) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(fact), statements); } @Test public void testBaseRelativeResolution() throws ParsingException { String input = "@base . () ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(fact), statements); } @Test public void testBaseResolution() throws ParsingException { String input = "@base . 
s(c) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(fact), statements); } @Test public void testNoBaseRelativeIri() throws ParsingException { String input = "s(c) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); PositiveLiteral atom = Expressions.makePositiveLiteral("s", Expressions.makeConstant("c")); - assertEquals(Arrays.asList(atom), ruleParser.getFacts()); + assertEquals(Arrays.asList(atom), statements); } @Test(expected = ParsingException.class) @@ -145,16 +149,16 @@ public void testNoExistentialLiterals() throws ParsingException { public void testSimpleRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . "; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(rule1), ruleParser.getRules()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(rule1), statements); } @Test public void testNegationRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(rule2), ruleParser.getRules()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(rule2), statements); } @Test(expected = ParsingException.class) @@ -169,8 +173,8 @@ public void testWhiteSpace() throws ParsingException { String input = "@base \n\n . " + " q(?X, !Y) , r(?X, d\t ) \n\n:- p(?X,c), p(?X,\n?Z) \n. "; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(rule1), ruleParser.getRules()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(rule1), statements); } @Test(expected = ParsingException.class) @@ -205,58 +209,58 @@ public void testNoDollarVariables() throws ParsingException { public void testIntegerLiteral() throws ParsingException { String input = "p(42) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("42^^<" + PrefixDeclarations.XSD_INTEGER + ">")); - assertEquals(Arrays.asList(integerLiteral), ruleParser.getFacts()); + assertEquals(Arrays.asList(integerLiteral), statements); } @Test public void testAbbreviatedIntegerLiteral() throws ParsingException { String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . 
" + "p(\"42\"^^xsd:integer) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + ">")); - assertEquals(Arrays.asList(integerLiteral), ruleParser.getFacts()); + assertEquals(Arrays.asList(integerLiteral), statements); } @Test public void testFullIntegerLiteral() throws ParsingException { String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> ) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + ">")); - assertEquals(Arrays.asList(integerLiteral), ruleParser.getFacts()); + assertEquals(Arrays.asList(integerLiteral), statements); } @Test public void testDecimalLiteral() throws ParsingException { String input = "p(-5.0) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("-5.0^^<" + PrefixDeclarations.XSD_DECIMAL + ">")); - assertEquals(Arrays.asList(decimalLiteral), ruleParser.getFacts()); + assertEquals(Arrays.asList(decimalLiteral), statements); } @Test public void testDoubleLiteral() throws ParsingException { String input = "p(4.2E9) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("4.2E9^^<" + PrefixDeclarations.XSD_DOUBLE + ">")); - assertEquals(Arrays.asList(doubleLiteral), ruleParser.getFacts()); + assertEquals(Arrays.asList(doubleLiteral), statements); } @Test public void testStringLiteral() throws ParsingException { String input = "p(\"abc\") ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(fact2), statements); } @Test(expected = ParsingException.class) @@ -270,10 +274,10 @@ public void testIncompleteStringLiteral() throws ParsingException { public void testStringLiteralEscapes() throws ParsingException { String input = "p(\"_\\\"_\\\\_\\n_\\t_\") ."; // User input: p("_\"_\\_\n_\t_") RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"_\"_\\_\n_\t_\"^^<" + PrefixDeclarations.XSD_STRING + ">")); - assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + assertEquals(Arrays.asList(fact), statements); } @Test @@ -281,20 +285,20 @@ public void testStringLiteralAllEscapes() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\") ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); PositiveLiteral fact = Expressions.makePositiveLiteral("p", 
Expressions.makeConstant("\"_\n_\t_\r_\b_\f_\'_\"_\\_\"^^<" + PrefixDeclarations.XSD_STRING + ">")); - assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + assertEquals(Arrays.asList(fact), statements); } @Test public void testStringLiteralMultiLine() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''') ."; // User input: p("a\"b\\c") RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"line 1\n\nline 2\nline 3\"^^<" + PrefixDeclarations.XSD_STRING + ">")); - assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + assertEquals(Arrays.asList(fact), statements); } @Test(expected = ParsingException.class) @@ -308,41 +312,41 @@ public void testIncompleteStringLiteralMultiLine() throws ParsingException { public void testFullLiteral() throws ParsingException { String input = "p(\"abc\"^^) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(fact2), statements); } @Test public void testUnicodeLiteral() throws ParsingException { String input = "p(\"\\u0061\\u0062\\u0063\") ."; // "abc" RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(fact2), statements); } @Test public void testUnicodeUri() throws ParsingException { String input = "@base . @prefix ex: . ex:\\u0073(c) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(fact), statements); } @Test public void testPrefixedLiteral() throws ParsingException { String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . " + "p(\"abc\"^^xsd:string) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(fact2), ruleParser.getFacts()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(fact2), statements); } @Test public void testLangStringLiteral() throws ParsingException { String input = "p(\"abc\"@en-gb) ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"abc\"@en-gb")); - assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + assertEquals(Arrays.asList(fact), statements); } @Test @@ -350,8 +354,8 @@ public void testLineComments() throws ParsingException { String input = "@prefix ex: . % comment \n" + "%@prefix ex: \n" + " ex:s(ex:c) . 
% comment \n"; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); - assertEquals(Arrays.asList(fact), ruleParser.getFacts()); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(fact), statements); } @Test @@ -388,24 +392,22 @@ public void tesLiteralError() throws ParsingException { public void testCsvSource() throws ParsingException, IOException { String input = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); Predicate p = Expressions.makePredicate("p", 2); - assertEquals(1, ruleParser.getDataSourceDeclartions().size()); - assertEquals(p, ruleParser.getDataSourceDeclartions().get(0).getPredicate()); - assertEquals(csvds, ruleParser.getDataSourceDeclartions().get(0).getDataSource()); + DataSourceDeclaration d = new DataSourceDeclarationImpl(p, csvds); + assertEquals(Arrays.asList(d), statements); } @Test public void testRdfSource() throws ParsingException, IOException { String input = "@source p(3) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); Predicate p = Expressions.makePredicate("p", 3); - assertEquals(1, ruleParser.getDataSourceDeclartions().size()); - assertEquals(p, ruleParser.getDataSourceDeclartions().get(0).getPredicate()); - assertEquals(rdfds, ruleParser.getDataSourceDeclartions().get(0).getDataSource()); + DataSourceDeclaration d = new DataSourceDeclarationImpl(p, rdfds); + assertEquals(Arrays.asList(d), statements); } @Test(expected = ParsingException.class) @@ -419,13 +421,12 @@ public void testRdfSourceInvalidArity() throws ParsingException, IOException { public void testSparqlSource() throws ParsingException, MalformedURLException { String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); Predicate p = Expressions.makePredicate("p", 2); - assertEquals(1, ruleParser.getDataSourceDeclartions().size()); - assertEquals(p, ruleParser.getDataSourceDeclartions().get(0).getPredicate()); - assertEquals(sparqlds, ruleParser.getDataSourceDeclartions().get(0).getDataSource()); + DataSourceDeclaration d = new DataSourceDeclarationImpl(p, sparqlds); + assertEquals(Arrays.asList(d), statements); } @Test(expected = ParsingException.class) diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index cfcbd5790..85c7f24b3 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -73,7 +73,7 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl final Set facts = 
RdfModelConverter.rdfModelToFacts(model); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFacts(facts); + kb.addStatements(facts); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); @@ -93,7 +93,7 @@ public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandle final Set facts = RdfModelConverter.rdfModelToFacts(model); final KnowledgeBase kb = new KnowledgeBase(); - kb.addFacts(facts); + kb.addStatements(facts); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); From abd9a2a6bd5f97002411ba3c154e9e56022ddbbb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 21 Aug 2019 08:24:13 +0200 Subject: [PATCH 0136/1003] Make RuleParser static --- .../vlog4j/examples/CountingTriangles.java | 9 +- .../vlog4j/examples/DoidExample.java | 5 +- .../vlog4j/examples/ExamplesUtils.java | 3 +- .../examples/SimpleReasoningExample.java | 3 +- .../examples/core/AddDataFromCsvFile.java | 7 +- .../examples/core/AddDataFromRdfFile.java | 7 +- .../SkolemVsRestrictedChaseTermination.java | 5 +- .../examples/rdf/AddDataFromRdfModel.java | 3 +- .../semanticweb/vlog4j/parser/RuleParser.java | 19 ++- .../vlog4j/syntax/parser/RuleParserTest.java | 132 ++++++------------ 10 files changed, 69 insertions(+), 124 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 6883d6a0d..c20ff05c0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -46,9 +46,8 @@ public static void main(final String[] args) throws IOException { KnowledgeBase kb; /* Configure rules */ - final RuleParser ruleParser = new RuleParser(); try { - kb = ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "counting-triangles.rls")); + kb = RuleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "counting-triangles.rls")); } catch (final ParsingException e) { System.out.println("Failed to parse rules: " + e.getMessage()); return; @@ -72,15 +71,15 @@ public static void main(final String[] args) throws IOException { try { PositiveLiteral query; - query = ruleParser.parsePositiveLiteral("country(?X)"); + query = RuleParser.parsePositiveLiteral("country(?X)"); System.out.print("Found " + ExamplesUtils.iteratorSize(reasoner.answerQuery(query, true)) + " countries in Wikidata"); // Due to symmetry, each joint border is found twice, hence we divide by 2: - query = ruleParser.parsePositiveLiteral("shareBorder(?X,?Y)"); + query = RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)"); System.out.println(", with " + (ExamplesUtils.iteratorSize(reasoner.answerQuery(query, true)) / 2) + " pairs of them sharing a border."); // Due to symmetry, each triangle is found six times, hence we divide by 6: - query = ruleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)"); + query = RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)"); System.out.println("The number of triangles of countries that mutually border each other was " + (ExamplesUtils.iteratorSize(reasoner.answerQuery(query, true)) / 6) + "."); } catch (final ParsingException e) { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 73a7c36c5..2d14c2438 100644 --- 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -52,9 +52,8 @@ public static void main(final String[] args) throws IOException { /* Configure rules */ KnowledgeBase kb; - final RuleParser ruleParser = new RuleParser(); try { - kb = ruleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/doid.rls")); + kb = RuleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/doid.rls")); } catch (final ParsingException e) { System.out.println("Failed to parse rules: " + e.getMessage()); return; @@ -82,7 +81,7 @@ public static void main(final String[] args) throws IOException { System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { try { - final PositiveLiteral query = ruleParser.parsePositiveLiteral(queryString); + final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); answers = reasoner.answerQuery(query, true); System.out.println(" " + query.toString() + ": " + ExamplesUtils.iteratorSize(answers)); } catch (final ParsingException e) { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 0ff1564ca..b6a456484 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -105,9 +105,8 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R */ public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) throws ReasonerStateException { - RuleParser ruleParser = new RuleParser(); try { - PositiveLiteral query = ruleParser.parsePositiveLiteral(queryString); + PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); printOutQueryAnswers(query, reasoner); } catch (ParsingException e) { throw new RuntimeException(e.getMessage(), e); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index d78c1ee33..88d27b830 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -69,10 +69,9 @@ public static void main(final String[] args) throws IOException { System.out.println("Knowledge base used in this example:\n\n" + rules); - final RuleParser ruleParser = new RuleParser(); KnowledgeBase kb; try { - kb = ruleParser.parse(rules); + kb = RuleParser.parse(rules); } catch (final ParsingException e) { System.out.println("Failed to parse rules: " + e.getMessage()); return; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index c82d67fcd..34385f56d 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -76,8 +76,7 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I + "hasPartIDB(?X, ?Y) :- isPartOfIDB(?Y, ?X) ." 
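After this patch the examples no longer construct a RuleParser at all; parse and parsePositiveLiteral are called statically. A small stand-alone sketch of the resulting style follows; the fact, rule and query strings here are invented for illustration and are not taken from the examples.

    import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
    import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
    import org.semanticweb.vlog4j.parser.ParsingException;
    import org.semanticweb.vlog4j.parser.RuleParser;

    public class StaticParserSketch {

        public static void main(final String[] args) throws ParsingException {
            // No parser instance is needed any more; both calls are static now.
            final KnowledgeBase kb = RuleParser
                    .parse("bicycle(redBike) . hasPartIDB(?X, !Y) :- bicycle(?X) .");
            final PositiveLiteral query = RuleParser.parsePositiveLiteral("hasPartIDB(?X, ?Y)");
            System.out.println(kb.getStatements().size() + " statements parsed, query atom: " + query);
        }
    }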
// + "isPartOfIDB(?X, ?Y) :- hasPartIDB(?Y, ?X) ."; - RuleParser ruleParser = new RuleParser(); - final KnowledgeBase kb = ruleParser.parse(rules); + final KnowledgeBase kb = RuleParser.parse(rules); /* * Loading, reasoning, and querying while using try-with-resources to close the @@ -92,7 +91,7 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I /* The reasoner will use the Restricted Chase by default. */ reasoner.reason(); System.out.println("After materialisation:"); - final PositiveLiteral hasPartIdbXY = ruleParser.parsePositiveLiteral("hasPartIDB(?X, ?Y)"); + final PositiveLiteral hasPartIdbXY = RuleParser.parsePositiveLiteral("hasPartIDB(?X, ?Y)"); ExamplesUtils.printOutQueryAnswers(hasPartIdbXY, reasoner); /* Exporting query answers to {@code .csv} files. */ @@ -101,7 +100,7 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I reasoner.exportQueryAnswersToCsv(hasPartIdbXY, ExamplesUtils.OUTPUT_FOLDER + "hasPartIDBXYWithoutBlanks.csv", false); - final PositiveLiteral hasPartIDBRedBikeY = ruleParser.parsePositiveLiteral("hasPartIDB(redBike, ?Y)"); + final PositiveLiteral hasPartIDBRedBikeY = RuleParser.parsePositiveLiteral("hasPartIDB(redBike, ?Y)"); reasoner.exportQueryAnswersToCsv(hasPartIDBRedBikeY, ExamplesUtils.OUTPUT_FOLDER + "hasPartIDBRedBikeYWithBlanks.csv", true); } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 099e83a18..a04ec5d69 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -82,8 +82,7 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I + "triplesIDB(?S, ex:isPartOf, ?O) :- triplesIDB(?O, ex:hasPart, ?S) ." + "triplesIDB(?S, ex:hasPart, ?O) :- triplesIDB(?O, ex:isPartOf, ?S) ."; - RuleParser ruleParser = new RuleParser(); - final KnowledgeBase kb = ruleParser.parse(rules); + final KnowledgeBase kb = RuleParser.parse(rules); /* * 2. Loading, reasoning, querying and exporting, while using try-with-resources @@ -99,7 +98,7 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I /* The reasoner will use the Restricted Chase by default. 
*/ reasoner.reason(); System.out.println("After materialisation:"); - final PositiveLiteral hasPartIDB = ruleParser + final PositiveLiteral hasPartIDB = RuleParser .parsePositiveLiteral("triplesIDB(?X, , ?Y)"); ExamplesUtils.printOutQueryAnswers(hasPartIDB, reasoner); @@ -109,7 +108,7 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I reasoner.exportQueryAnswersToCsv(hasPartIDB, ExamplesUtils.OUTPUT_FOLDER + "ternaryHasPartIDBWithoutBlanks.csv", false); - final PositiveLiteral existsHasPartRedBike = ruleParser.parsePositiveLiteral( + final PositiveLiteral existsHasPartRedBike = RuleParser.parsePositiveLiteral( "triplesIDB(, , ?X)"); reasoner.exportQueryAnswersToCsv(existsHasPartRedBike, ExamplesUtils.OUTPUT_FOLDER + "existsHasPartIDBRedBikeWithBlanks.csv", true); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 3499bc0b6..809362158 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -70,8 +70,7 @@ public static void main(final String[] args) throws ReasonerStateException, EdbI + "hasPartIDB(?X, ?Y) :- isPartOfIDB(?Y, ?X) ." // + "isPartOfIDB(?X, ?Y) :- hasPartIDB(?Y, ?X) ."; - final RuleParser ruleParser = new RuleParser(); - final KnowledgeBase kb = ruleParser.parse(rules); + final KnowledgeBase kb = RuleParser.parse(rules); /* * 2. Loading, reasoning, and querying. Use try-with resources, or remember to @@ -80,7 +79,7 @@ public static void main(final String[] args) throws ReasonerStateException, EdbI try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - final PositiveLiteral queryHasPart = ruleParser.parsePositiveLiteral("hasPartIDB(?X, ?Y)"); + final PositiveLiteral queryHasPart = RuleParser.parsePositiveLiteral("hasPartIDB(?X, ?Y)"); /* See that there is no fact HasPartIDB before reasoning. 
*/ System.out.println("Before reasoning is started, no inferrences have been computed yet."); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index 9dcb32d00..0d13eefe7 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -125,9 +125,8 @@ public static void main(final String[] args) throws IOException, RDFParseExcepti + " TRIPLE(?Person, cnf:hasAffiliation, ?Aff), TRIPLE(?Aff, cnf:withOrganisation, ?Org)," + " TRIPLE(?Org, cnf:name, ?OrgName) ."; KnowledgeBase kb; - final RuleParser ruleParser = new RuleParser(); try { - kb = ruleParser.parse(rules); + kb = RuleParser.parse(rules); } catch (final ParsingException e) { System.out.println("Failed to parse rules: " + e.getMessage()); return; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 469b6e4e3..b91cfd6dc 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -34,7 +34,7 @@ import org.semanticweb.vlog4j.parser.ParsingException; /** - * Class to access VLog parsing functionality. + * Class to statically access VLog parsing functionality. * * @FIXME Support parsing from multiple files (into one KB). * @@ -43,23 +43,20 @@ */ public class RuleParser { - JavaCCParser parser; - - public KnowledgeBase parse(InputStream stream, String encoding) throws ParsingException { - parser = new JavaCCParser(stream, encoding); - return doParse(); + public static KnowledgeBase parse(InputStream stream, String encoding) throws ParsingException { + return doParse(new JavaCCParser(stream, encoding)); } - public KnowledgeBase parse(InputStream stream) throws ParsingException { + public static KnowledgeBase parse(InputStream stream) throws ParsingException { return parse(stream, "UTF-8"); } - public KnowledgeBase parse(String input) throws ParsingException { + public static KnowledgeBase parse(String input) throws ParsingException { InputStream inputStream = new ByteArrayInputStream(input.getBytes()); return parse(inputStream, "UTF-8"); } - public Literal parseLiteral(String input) throws ParsingException { + public static Literal parseLiteral(String input) throws ParsingException { InputStream inputStream = new ByteArrayInputStream(input.getBytes()); JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); try { @@ -69,7 +66,7 @@ public Literal parseLiteral(String input) throws ParsingException { } } - public PositiveLiteral parsePositiveLiteral(String input) throws ParsingException { + public static PositiveLiteral parsePositiveLiteral(String input) throws ParsingException { InputStream inputStream = new ByteArrayInputStream(input.getBytes()); JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); try { @@ -79,7 +76,7 @@ public PositiveLiteral parsePositiveLiteral(String input) throws ParsingExceptio } } - KnowledgeBase doParse() throws ParsingException { + static KnowledgeBase doParse(JavaCCParser parser) throws ParsingException { try { parser.parse(); return parser.getKnowledgeBase(); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 4ff4e5fc2..dc1598278 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -72,40 +72,35 @@ public class RuleParserTest { @Test public void testExplicitIri() throws ParsingException { String input = "() ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact), statements); } @Test public void testPrefixResolution() throws ParsingException { String input = "@prefix ex: . ex:s(ex:c) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact), statements); } @Test public void testBaseRelativeResolution() throws ParsingException { String input = "@base . () ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact), statements); } @Test public void testBaseResolution() throws ParsingException { String input = "@base . s(c) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact), statements); } @Test public void testNoBaseRelativeIri() throws ParsingException { String input = "s(c) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral atom = Expressions.makePositiveLiteral("s", Expressions.makeConstant("c")); assertEquals(Arrays.asList(atom), statements); } @@ -113,103 +108,89 @@ public void testNoBaseRelativeIri() throws ParsingException { @Test(expected = ParsingException.class) public void testPrefixConflict() throws ParsingException { String input = "@prefix ex: . @prefix ex: . s(c) ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testBaseConflict() throws ParsingException { String input = "@base . @base . 
s(c) ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testMissingPrefix() throws ParsingException { String input = "ex:s(c) ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoUniversalLiterals() throws ParsingException { String input = "p(?X) ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoExistentialLiterals() throws ParsingException { String input = "p(!X) ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test public void testSimpleRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . "; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(rule1), statements); } @Test public void testNegationRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(rule2), statements); } @Test(expected = ParsingException.class) public void testUnsafeNegationRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?Y,c), p(?X,?Z) . "; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test public void testWhiteSpace() throws ParsingException { String input = "@base \n\n . " + " q(?X, !Y) , r(?X, d\t ) \n\n:- p(?X,c), p(?X,\n?Z) \n. 
"; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(rule1), statements); } @Test(expected = ParsingException.class) public void testNoUnsafeVariables() throws ParsingException { String input = "p(?X,?Y) :- q(?X) ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoConflictingQuantificationVariables() throws ParsingException { String input = "p(?X,!X) :- q(?X) ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoBodyExistential() throws ParsingException { String input = "p(?X) :- q(?X,!Y) ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoDollarVariables() throws ParsingException { String input = "p($X) :- q($X) ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test public void testIntegerLiteral() throws ParsingException { String input = "p(42) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("42^^<" + PrefixDeclarations.XSD_INTEGER + ">")); assertEquals(Arrays.asList(integerLiteral), statements); @@ -218,8 +199,7 @@ public void testIntegerLiteral() throws ParsingException { @Test public void testAbbreviatedIntegerLiteral() throws ParsingException { String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . 
" + "p(\"42\"^^xsd:integer) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + ">")); assertEquals(Arrays.asList(integerLiteral), statements); @@ -228,8 +208,7 @@ public void testAbbreviatedIntegerLiteral() throws ParsingException { @Test public void testFullIntegerLiteral() throws ParsingException { String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> ) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + ">")); assertEquals(Arrays.asList(integerLiteral), statements); @@ -238,8 +217,7 @@ public void testFullIntegerLiteral() throws ParsingException { @Test public void testDecimalLiteral() throws ParsingException { String input = "p(-5.0) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("-5.0^^<" + PrefixDeclarations.XSD_DECIMAL + ">")); assertEquals(Arrays.asList(decimalLiteral), statements); @@ -248,8 +226,7 @@ public void testDecimalLiteral() throws ParsingException { @Test public void testDoubleLiteral() throws ParsingException { String input = "p(4.2E9) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", Expressions.makeConstant("4.2E9^^<" + PrefixDeclarations.XSD_DOUBLE + ">")); assertEquals(Arrays.asList(doubleLiteral), statements); @@ -258,23 +235,20 @@ public void testDoubleLiteral() throws ParsingException { @Test public void testStringLiteral() throws ParsingException { String input = "p(\"abc\") ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact2), statements); } @Test(expected = ParsingException.class) public void testIncompleteStringLiteral() throws ParsingException { String input = "p(\"abc) ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test public void testStringLiteralEscapes() throws ParsingException { String input = "p(\"_\\\"_\\\\_\\n_\\t_\") ."; // User input: p("_\"_\\_\n_\t_") - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"_\"_\\_\n_\t_\"^^<" + PrefixDeclarations.XSD_STRING + ">")); assertEquals(Arrays.asList(fact), statements); @@ -284,8 +258,7 @@ public void 
testStringLiteralEscapes() throws ParsingException { public void testStringLiteralAllEscapes() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\") ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"_\n_\t_\r_\b_\f_\'_\"_\\_\"^^<" + PrefixDeclarations.XSD_STRING + ">")); assertEquals(Arrays.asList(fact), statements); @@ -294,8 +267,7 @@ public void testStringLiteralAllEscapes() throws ParsingException { @Test public void testStringLiteralMultiLine() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''') ."; // User input: p("a\"b\\c") - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"line 1\n\nline 2\nline 3\"^^<" + PrefixDeclarations.XSD_STRING + ">")); assertEquals(Arrays.asList(fact), statements); @@ -304,47 +276,41 @@ public void testStringLiteralMultiLine() throws ParsingException { @Test(expected = ParsingException.class) public void testIncompleteStringLiteralMultiLine() throws ParsingException { String input = "p('''abc\ndef'') ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } @Test public void testFullLiteral() throws ParsingException { String input = "p(\"abc\"^^) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact2), statements); } @Test public void testUnicodeLiteral() throws ParsingException { String input = "p(\"\\u0061\\u0062\\u0063\") ."; // "abc" - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact2), statements); } @Test public void testUnicodeUri() throws ParsingException { String input = "@base . @prefix ex: . ex:\\u0073(c) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact), statements); } @Test public void testPrefixedLiteral() throws ParsingException { String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . 
" + "p(\"abc\"^^xsd:string) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact2), statements); } @Test public void testLangStringLiteral() throws ParsingException { String input = "p(\"abc\"@en-gb) ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeConstant("\"abc\"@en-gb")); assertEquals(Arrays.asList(fact), statements); } @@ -353,46 +319,40 @@ public void testLangStringLiteral() throws ParsingException { public void testLineComments() throws ParsingException { String input = "@prefix ex: . % comment \n" + "%@prefix ex: \n" + " ex:s(ex:c) . % comment \n"; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact), statements); } @Test public void testPositiveLiteral() throws ParsingException { String input = "(?X,)"; - RuleParser ruleParser = new RuleParser(); - Literal literal = ruleParser.parsePositiveLiteral(input); + Literal literal = RuleParser.parsePositiveLiteral(input); assertEquals(atom1, literal); } @Test(expected = ParsingException.class) public void testPositiveLiteralError() throws ParsingException { String input = "~ (?X,)"; - RuleParser ruleParser = new RuleParser(); - ruleParser.parsePositiveLiteral(input); + RuleParser.parsePositiveLiteral(input); } @Test public void testLiteral() throws ParsingException { String input = "~ (?X,)"; - RuleParser ruleParser = new RuleParser(); - Literal literal = ruleParser.parseLiteral(input); + Literal literal = RuleParser.parseLiteral(input); assertEquals(negAtom1, literal); } @Test(expected = ParsingException.class) public void tesLiteralError() throws ParsingException { String input = "(?X, statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); Predicate p = Expressions.makePredicate("p", 2); DataSourceDeclaration d = new DataSourceDeclarationImpl(p, csvds); @@ -402,8 +362,7 @@ public void testCsvSource() throws ParsingException, IOException { @Test public void testRdfSource() throws ParsingException, IOException { String input = "@source p(3) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); Predicate p = Expressions.makePredicate("p", 3); DataSourceDeclaration d = new DataSourceDeclarationImpl(p, rdfds); @@ -413,15 +372,13 @@ public void testRdfSource() throws ParsingException, IOException { @Test(expected = ParsingException.class) public void testRdfSourceInvalidArity() throws ParsingException, IOException { String input = "@source p(2) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - RuleParser ruleParser = new RuleParser(); 
- ruleParser.parse(input); + RuleParser.parse(input); } @Test public void testSparqlSource() throws ParsingException, MalformedURLException { String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - RuleParser ruleParser = new RuleParser(); - ArrayList statements = new ArrayList<>(ruleParser.parse(input).getStatements()); + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); Predicate p = Expressions.makePredicate("p", 2); @@ -432,8 +389,7 @@ public void testSparqlSource() throws ParsingException, MalformedURLException { @Test(expected = ParsingException.class) public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - RuleParser ruleParser = new RuleParser(); - ruleParser.parse(input); + RuleParser.parse(input); } } From caa6db88a39735f54fda86b445b14932feb847b0 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Wed, 21 Aug 2019 10:16:25 +0200 Subject: [PATCH 0137/1003] Added a KnowledgeBaseListener skeleton. --- .../vlog4j/core/reasoner/KnowledgeBase.java | 42 ++++++++++++------- .../core/reasoner/KnowledgeBaseListener.java | 37 ++++++++++++++++ .../vlog4j/core/reasoner/Reasoner.java | 20 ++++----- .../reasoner/implementation/VLogReasoner.java | 26 +++++++++--- .../implementation/AddDataSourceTest.java | 6 ++- 5 files changed, 97 insertions(+), 34 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index bf3cabefe..a4309c2d5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -8,7 +8,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Observable; import java.util.Set; import org.apache.commons.lang3.Validate; @@ -45,11 +44,22 @@ * #L% */ -public class KnowledgeBase extends Observable { +public class KnowledgeBase { + + private final Set listeners = new HashSet<>(); private final List rules = new ArrayList<>(); private final Map> factsForPredicate = new HashMap<>(); private final Map dataSourceForPredicate = new HashMap<>(); + + public void addListener(KnowledgeBaseListener listener) { + this.listeners.add(listener); + } + + public void deleteListener(KnowledgeBaseListener listener) { + this.listeners.remove(listener); + + } /** * Adds rules to the knowledge base in the given order. The reasoner may @@ -83,7 +93,7 @@ public void addRules(@NonNull List rules) { Validate.noNullElements(rules, "Null rules are not alowed! The list contains a null at position [%d]."); this.rules.addAll(new ArrayList<>(rules)); - // TODO setChanged + //TODO compute diff // TODO notify listeners with the diff } @@ -117,8 +127,7 @@ public List getRules() { */ public void addFacts(final Fact... 
facts) { addFacts(Arrays.asList(facts)); - - // TODO setChanged + //TODO compute diff // TODO notify listeners with the diff } @@ -146,7 +155,7 @@ public void addFacts(final Collection facts) { validateFactTermsAreConstant(fact); final Predicate predicate = fact.getPredicate(); - validateNoDataSourceForPredicate(predicate); +// validateNoDataSourceForPredicate(predicate); this.factsForPredicate.putIfAbsent(predicate, new HashSet<>()); this.factsForPredicate.get(predicate).add(fact); @@ -175,12 +184,15 @@ public void addFacts(final Collection facts) { public void addFactsFromDataSource(Predicate predicate, DataSource dataSource) { Validate.notNull(predicate, "Null predicates are not allowed!"); Validate.notNull(dataSource, "Null dataSources are not allowed!"); - validateNoDataSourceForPredicate(predicate); - Validate.isTrue(!this.factsForPredicate.containsKey(predicate), - "Multiple data sources for the same predicate are not allowed! Facts for predicate [%s] alredy added in memory: %s", - predicate, this.factsForPredicate.get(predicate)); +// validateNoDataSourceForPredicate(predicate); +// Validate.isTrue(!this.factsForPredicate.containsKey(predicate), +// "Multiple data sources for the same predicate are not allowed! Facts for predicate [%s] alredy added in memory: %s", +// predicate, this.factsForPredicate.get(predicate)); this.dataSourceForPredicate.put(predicate, dataSource); + + //TODO compute diff + // TODO notify listeners with the diff } public boolean hasFacts() { @@ -214,11 +226,11 @@ private void validateFactTermsAreConstant(PositiveLiteral fact) { } - private void validateNoDataSourceForPredicate(final Predicate predicate) { - Validate.isTrue(!this.dataSourceForPredicate.containsKey(predicate), - "Multiple data sources for the same predicate are not allowed! Facts for predicate [%s] alredy added from data source: %s", - predicate, this.dataSourceForPredicate.get(predicate)); - } +// private void validateNoDataSourceForPredicate(final Predicate predicate) { +// Validate.isTrue(!this.dataSourceForPredicate.containsKey(predicate), +// "Multiple data sources for the same predicate are not allowed! Facts for predicate [%s] alredy added from data source: %s", +// predicate, this.dataSourceForPredicate.get(predicate)); +// } private Set collectEdbPredicates() { final Set edbPredicates = new HashSet<>(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java new file mode 100644 index 000000000..7cf463b09 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java @@ -0,0 +1,37 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.Set; + +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Rule; + +public interface KnowledgeBaseListener { + + void onFactsAdded(Set factsAdded); + + void onDataSourceDeclarationsAdded(Set dataSourceDeclarationsAdded); + + void onRulesAdded(Set rulesAdded); + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index afd7358f0..2d6a2561f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -1,9 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner; import java.io.IOException; -import java.util.Observer; -import org.eclipse.jdt.annotation.NonNull; import org.eclipse.jdt.annotation.Nullable; import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; @@ -90,7 +88,7 @@ * @author Irina Dragoste * */ -public interface Reasoner extends AutoCloseable, Observer { +public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { /** * Factory method that to instantiate a Reasoner with an empty knowledge base. @@ -115,7 +113,7 @@ public static Reasoner getInstance() { * * @param algorithm the algorithm to be used for reasoning. */ - void setAlgorithm(@NonNull Algorithm algorithm); + void setAlgorithm(Algorithm algorithm); /** * Getter for the algorithm that will be used for reasoning over the knowledge @@ -137,7 +135,7 @@ public static Reasoner getInstance() { * seconds. If {@code null}, reasoning will not be interrupted * and will return only after (if) it has reached completion. */ - void setReasoningTimeout(@Nullable Integer seconds); + void setReasoningTimeout(Integer seconds); /** * This method returns the reasoning timeout, representing the interval (in @@ -161,7 +159,7 @@ public static Reasoner getInstance() { * rewritten before reasoning. * @throws ReasonerStateException if the reasoner has already been loaded. */ - void setRuleRewriteStrategy(@NonNull RuleRewriteStrategy ruleRewritingStrategy) throws ReasonerStateException; + void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) throws ReasonerStateException; /** * Getter for the strategy according to which rules will be rewritten before @@ -170,7 +168,6 @@ public static Reasoner getInstance() { * * @return the current rule re-writing strategy */ - @NonNull RuleRewriteStrategy getRuleRewriteStrategy(); /** @@ -180,7 +177,7 @@ public static Reasoner getInstance() { * @param logLevel the logging level to be set for VLog C++ resource. * @throws ReasonerStateException if the method is called on a closed reasoner. */ - void setLogLevel(@NonNull LogLevel logLevel) throws ReasonerStateException; + void setLogLevel(LogLevel logLevel) throws ReasonerStateException; /** * Returns the logging level of the internal VLog C++ resource. If no value has @@ -188,7 +185,6 @@ public static Reasoner getInstance() { * * @return the logging level of the VLog C++ resource. */ - @Nullable LogLevel getLogLevel(); /** @@ -201,7 +197,7 @@ public static Reasoner getInstance() { * to the default system output. * @throws ReasonerStateException if the method is called on a closed reasoner. 
*/ - void setLogFile(@Nullable String filePath) throws ReasonerStateException; + void setLogFile(String filePath) throws ReasonerStateException; @@ -408,7 +404,7 @@ boolean reason() * not of type {@link TermType#CONSTANT} or * {@link TermType#VARIABLE}. */ - QueryResultIterator answerQuery(@NonNull PositiveLiteral query, boolean includeBlanks) + QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) throws ReasonerStateException; // TODO add examples to query javadoc @@ -465,7 +461,7 @@ QueryResultIterator answerQuery(@NonNull PositiveLiteral query, boolean includeB * */ // TODO update javadoc with return type - MaterialisationState exportQueryAnswersToCsv(@NonNull PositiveLiteral query, @NonNull String csvFilePath, + MaterialisationState exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeBlanks) throws ReasonerStateException, IOException; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 35e1024ec..dcc8c2818 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -4,7 +4,6 @@ import java.util.Formatter; import java.util.HashSet; import java.util.Map; -import java.util.Observable; import java.util.Set; import org.apache.commons.lang3.Validate; @@ -12,8 +11,11 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; @@ -78,7 +80,7 @@ public class VLogReasoner implements Reasoner { public VLogReasoner(KnowledgeBase knowledgeBase) { super(); this.knowledgeBase = knowledgeBase; - this.knowledgeBase.addObserver(this); + this.knowledgeBase.addListener(this); } @Override @@ -325,7 +327,8 @@ public void resetReasoner() throws ReasonerStateException { public void close() { this.reasonerState = ReasonerState.AFTER_CLOSING; - this.knowledgeBase.deleteObserver(this); + // TODO delete listener + this.knowledgeBase.deleteListener(this); this.vLog.stop(); } @@ -442,10 +445,21 @@ public CyclicityResult checkForCycles() throws ReasonerStateException, NotStarte } @Override - public void update(Observable o, Object arg) { - // TODO update materialisation state for query answering - // TODO compute KB diff + public void onFactsAdded(Set factsAdded) { + // TODO Auto-generated method stub + + } + @Override + public void onDataSourceDeclarationsAdded(Set dataSourceDeclarationsAdded) { + // TODO Auto-generated method stub + + } + + @Override + public void onRulesAdded(Set rulesAdded) { + // TODO Auto-generated method stub + } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 4f7077a81..8b3ee180d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -130,6 +130,8 @@ public void testAddDataSourceAfterReasoning() throws ReasonerStateException, Edb } } + //FIXME decide how to handle datasources with multiple predicates + @Ignore // TODO move to a test class for KnowledgeBase @Test(expected = IllegalArgumentException.class) public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws ReasonerStateException, IOException { @@ -140,7 +142,9 @@ public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws Reasoner kb.addFactsFromDataSource(predicate, dataSource); kb.addFactsFromDataSource(predicate, dataSource); } - + + //FIXME decide how to handle datasources with multiple predicates + @Ignore // TODO move to a test class for KnowledgeBase @Test(expected = IllegalArgumentException.class) public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException, IOException { From ad9156d90f7e70d92f18416f8368ec186273f4dd Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Wed, 21 Aug 2019 11:38:20 +0200 Subject: [PATCH 0138/1003] added onStatementsAdded and onStatementAdded events --- .../vlog4j/core/reasoner/KnowledgeBase.java | 47 +++++++++++++++++-- .../core/reasoner/KnowledgeBaseListener.java | 10 ++-- .../reasoner/implementation/VLogReasoner.java | 16 ++----- 3 files changed, 50 insertions(+), 23 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 55b51dda9..fd9643a07 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -141,16 +141,25 @@ public Void visit(DataSourceDeclaration statement) { * Index structure that organises all facts by their predicate. */ final Map> factsByPredicate = new HashMap<>(); + /** * Index structure that holds all data source declarations of this knowledge * base. */ final Set dataSourceDeclarations = new HashSet<>(); + /** + * Registers a listener for changes on the knowledge base + * @param listener + */ public void addListener(KnowledgeBaseListener listener) { this.listeners.add(listener); } + /** + * Unregisters given listener from changes on the knowledge base + * @param listener + */ public void deleteListener(KnowledgeBaseListener listener) { this.listeners.remove(listener); @@ -158,14 +167,19 @@ public void deleteListener(KnowledgeBaseListener listener) { /** * Adds a single statement to the knowledge base. - * + * @return true, if the knowledge base has changed. 
* @param statement */ - public void addStatement(Statement statement) { + public boolean addStatement(Statement statement) { Validate.notNull(statement, "Statement cannot be Null."); if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { this.statements.add(statement); + + notifyListenersOnStatementAdded(statement); + + return true; } + return false; } /** @@ -174,9 +188,16 @@ public void addStatement(Statement statement) { * @param statements */ public void addStatements(Collection statements) { + final Set addedStatements = new HashSet<>(); + for (final Statement statement : statements) { - addStatement(statement); + if (addStatement(statement)) { + addedStatements.add(statement); + } } + + notifyListenersOnStatementsAdded(addedStatements); + } /** @@ -185,8 +206,26 @@ public void addStatements(Collection statements) { * @param statements */ public void addStatements(Statement... statements) { + final Set addedStatements = new HashSet<>(); + for (final Statement statement : statements) { - addStatement(statement); + if (addStatement(statement)) { + addedStatements.add(statement); + } + } + + notifyListenersOnStatementsAdded(addedStatements); + } + + private void notifyListenersOnStatementsAdded(final Set addedStatements) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsAdded(addedStatements); + } + } + + private void notifyListenersOnStatementAdded(final Statement addedStatements) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementAdded(addedStatements); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java index 7cf463b09..cae99a5a2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java @@ -22,16 +22,12 @@ import java.util.Set; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; public interface KnowledgeBaseListener { - void onFactsAdded(Set factsAdded); + void onStatementAdded(Statement statementAdded); - void onDataSourceDeclarationsAdded(Set dataSourceDeclarationsAdded); - - void onRulesAdded(Set rulesAdded); + void onStatementsAdded(Set statementsAdded); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index dc9011e32..198313a9d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -11,11 +11,9 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; +import 
org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; @@ -443,19 +441,13 @@ public CyclicityResult checkForCycles() throws ReasonerStateException, NotStarte } @Override - public void onFactsAdded(Set factsAdded) { - // TODO Auto-generated method stub - - } - - @Override - public void onDataSourceDeclarationsAdded(Set dataSourceDeclarationsAdded) { - // TODO Auto-generated method stub + public void onStatementsAdded(Set statementsAdded) { + // TODO change materialisation state } @Override - public void onRulesAdded(Set rulesAdded) { + public void onStatementAdded(Statement statementAdded) { // TODO Auto-generated method stub } From c2a2ee96181d060280a6f20b7cd52e950b227b70 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Wed, 21 Aug 2019 12:00:12 +0200 Subject: [PATCH 0139/1003] update materialisation state on statements added --- .../reasoner/implementation/VLogReasoner.java | 46 ++++++++++++++++--- 1 file changed, 39 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 198313a9d..89840f16c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -78,7 +78,7 @@ public class VLogReasoner implements Reasoner { public VLogReasoner(KnowledgeBase knowledgeBase) { super(); this.knowledgeBase = knowledgeBase; - this.knowledgeBase.addListener(this); + this.knowledgeBase.addListener(this); } @Override @@ -323,8 +323,8 @@ public void resetReasoner() throws ReasonerStateException { @Override public void close() { - this.reasonerState = ReasonerState.AFTER_CLOSING; - this.knowledgeBase.deleteListener(this); + this.reasonerState = ReasonerState.AFTER_CLOSING; + this.knowledgeBase.deleteListener(this); this.vLog.stop(); } @@ -442,14 +442,46 @@ public CyclicityResult checkForCycles() throws ReasonerStateException, NotStarte @Override public void onStatementsAdded(Set statementsAdded) { - // TODO change materialisation state - + updateMaterialisationStateOnStatementsAdded(statementsAddedInvalidateMaterialisation(statementsAdded)); } @Override public void onStatementAdded(Statement statementAdded) { - // TODO Auto-generated method stub - + updateMaterialisationStateOnStatementsAdded(statementAddedInvalidatesMaterialisation(statementAdded)); + } + + private boolean statementsAddedInvalidateMaterialisation(Set statementsAdded) { + // if statements contain Facts or DataSourceDeclarations for predicates that + // appear as negated in rules, return true + // TODO implement + return false; + + } + + private boolean statementAddedInvalidatesMaterialisation(Statement statementAdded) { + // if statement is a Facts or a DataSourceDeclarations for predicates that + // appear as negated in rules, return true + // TODO implement + return false; + } + + private void updateMaterialisationStateOnStatementsAdded(boolean materialisationInvalidated) { + switch (materialisationState) { + case WRONG: + // added statements do not change the WRONG state + break; + + case INCOMPLETE: + case COMPLETE: + if (materialisationInvalidated) { + this.materialisationState = materialisationInvalidated ? 
MaterialisationState.WRONG + : MaterialisationState.INCOMPLETE; + } + break; + + default: + break; + } } } From 6df05ed10bc2829edf1dbf1590f654e502c81f40 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 21 Aug 2019 12:02:32 +0200 Subject: [PATCH 0140/1003] add dummy DataSourceDeclaration --- .../reasoner/implementation/VLogReasoner.java | 61 +++++++++++++++++-- 1 file changed, 56 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 198313a9d..9dda09143 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -11,9 +11,11 @@ import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; @@ -56,6 +58,55 @@ public class VLogReasoner implements Reasoner { + /** + * Dummy data source declaration for predicates for which we have explicit local + * facts in the input. + * + * @author Markus Kroetzsch + * + */ + class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { + + final Predicate predicate; + + public LocalFactsDataSourceDeclaration(Predicate predicate) { + this.predicate = predicate; + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public DataSource getDataSource() { + return null; + } + + @Override + public int hashCode() { + return predicate.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; + return predicate.equals(other.predicate); + } + + } + private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); private final KnowledgeBase knowledgeBase; @@ -78,7 +129,7 @@ public class VLogReasoner implements Reasoner { public VLogReasoner(KnowledgeBase knowledgeBase) { super(); this.knowledgeBase = knowledgeBase; - this.knowledgeBase.addListener(this); + this.knowledgeBase.addListener(this); } @Override @@ -323,8 +374,8 @@ public void resetReasoner() throws ReasonerStateException { @Override public void close() { - this.reasonerState = ReasonerState.AFTER_CLOSING; - this.knowledgeBase.deleteListener(this); + this.reasonerState = ReasonerState.AFTER_CLOSING; + this.knowledgeBase.deleteListener(this); this.vLog.stop(); } @@ -443,13 +494,13 @@ public CyclicityResult checkForCycles() throws ReasonerStateException, NotStarte @Override public void onStatementsAdded(Set statementsAdded) { // TODO change materialisation state - + } 
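The two callbacks above are the whole listener contract at this stage. A minimal usage sketch (not taken from the patches themselves) of how client code can register such a listener; it assumes the default KnowledgeBase constructor and uses the Expressions factory methods that appear in the tests of this series:

```
// Minimal sketch of a client-side KnowledgeBaseListener that logs additions.
import java.util.Arrays;
import java.util.Set;

import org.semanticweb.vlog4j.core.model.api.Fact;
import org.semanticweb.vlog4j.core.model.api.Statement;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBaseListener;

public class LoggingKbListener implements KnowledgeBaseListener {

	@Override
	public void onStatementAdded(Statement statementAdded) {
		System.out.println("Added statement: " + statementAdded);
	}

	@Override
	public void onStatementsAdded(Set<Statement> statementsAdded) {
		System.out.println("Added " + statementsAdded.size() + " statements in one batch.");
	}

	public static void main(String[] args) {
		final KnowledgeBase kb = new KnowledgeBase();
		kb.addListener(new LoggingKbListener());

		final Fact fact = Expressions.makeFact("p", Arrays.asList(Expressions.makeConstant("c")));
		kb.addStatement(fact); // true: new statement, listener is notified
		kb.addStatement(fact); // false: duplicate, no notification is sent
	}
}
```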
@Override public void onStatementAdded(Statement statementAdded) { // TODO Auto-generated method stub - + } } From 9fc0c60d3c1cc2d55fc02e349607fe257806721c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 21 Aug 2019 12:02:44 +0200 Subject: [PATCH 0141/1003] Auto format --- .../vlog4j/core/reasoner/KnowledgeBase.java | 35 ++++++++++--------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index fd9643a07..12398a0e0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -53,9 +53,9 @@ * */ public class KnowledgeBase { - + private final Set listeners = new HashSet<>(); - + /** * Auxiliary class to process {@link Statement}s when added to the knowledge * base. Returns true if a statement was added successfully. @@ -129,7 +129,7 @@ public Void visit(DataSourceDeclaration statement) { * The primary storage for the contents of the knowledge base. */ final LinkedHashSet statements = new LinkedHashSet<>(); - + /** * Known prefixes that can be used to pretty-print the contents of the knowledge * base. We try to preserve user-provided prefixes found in files when loading @@ -141,32 +141,35 @@ public Void visit(DataSourceDeclaration statement) { * Index structure that organises all facts by their predicate. */ final Map> factsByPredicate = new HashMap<>(); - + /** * Index structure that holds all data source declarations of this knowledge * base. */ final Set dataSourceDeclarations = new HashSet<>(); - + /** * Registers a listener for changes on the knowledge base + * * @param listener */ public void addListener(KnowledgeBaseListener listener) { this.listeners.add(listener); } - + /** * Unregisters given listener from changes on the knowledge base + * * @param listener */ public void deleteListener(KnowledgeBaseListener listener) { this.listeners.remove(listener); - + } /** * Adds a single statement to the knowledge base. + * * @return true, if the knowledge base has changed. * @param statement */ @@ -174,9 +177,9 @@ public boolean addStatement(Statement statement) { Validate.notNull(statement, "Statement cannot be Null."); if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { this.statements.add(statement); - + notifyListenersOnStatementAdded(statement); - + return true; } return false; @@ -189,15 +192,15 @@ public boolean addStatement(Statement statement) { */ public void addStatements(Collection statements) { final Set addedStatements = new HashSet<>(); - + for (final Statement statement : statements) { if (addStatement(statement)) { addedStatements.add(statement); } } - + notifyListenersOnStatementsAdded(addedStatements); - + } /** @@ -207,13 +210,13 @@ public void addStatements(Collection statements) { */ public void addStatements(Statement... 
statements) { final Set addedStatements = new HashSet<>(); - + for (final Statement statement : statements) { if (addStatement(statement)) { addedStatements.add(statement); } } - + notifyListenersOnStatementsAdded(addedStatements); } @@ -222,7 +225,7 @@ private void notifyListenersOnStatementsAdded(final Set addedStatemen listener.onStatementsAdded(addedStatements); } } - + private void notifyListenersOnStatementAdded(final Statement addedStatements) { for (final KnowledgeBaseListener listener : this.listeners) { listener.onStatementAdded(addedStatements); @@ -344,5 +347,5 @@ Set collectIdbPredicates() { public Collection getStatements() { return this.statements; } - + } From 4ccd8c50389c6d838c48983a824c7f23193e15fe Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 21 Aug 2019 15:31:43 +0200 Subject: [PATCH 0142/1003] Make KB iterable --- .../semanticweb/vlog4j/core/reasoner/KnowledgeBase.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 12398a0e0..69d3d4f9c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -5,6 +5,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; @@ -52,7 +53,7 @@ * @author Markus Kroetzsch * */ -public class KnowledgeBase { +public class KnowledgeBase implements Iterable { private final Set listeners = new HashSet<>(); @@ -348,4 +349,9 @@ public Collection getStatements() { return this.statements; } + @Override + public Iterator iterator() { + return this.statements.iterator(); + } + } From 689ba93da000c6fc08f0ff4d51ed4b432eae79a8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 21 Aug 2019 15:43:08 +0200 Subject: [PATCH 0143/1003] use tail recursion in transitive closure --- .../org/semanticweb/vlog4j/examples/SimpleReasoningExample.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 88d27b830..ca9df94b8 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -58,7 +58,7 @@ public static void main(final String[] args) throws IOException { + "zipLocation(\"01069\", dresden) . \n" // + "% --- Standard recursion: locations are transitive --- \n" // + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // - + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . \n" // + + "locatedIn(?X,?Z) :- location(?X,?Y), locatedIn(?Y,?Z) . \n" // + "% --- Build address facts using the city constant --- \n" // + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP, ?City) . 
\n" + "% --- Value invention: universities have some address --- \n" // From a13dd7a755f1227368d144a13397a0642384592a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 21 Aug 2019 17:01:04 +0200 Subject: [PATCH 0144/1003] Rewrite of KB loading code in VLogReasoner --- .../vlog4j/core/reasoner/KnowledgeBase.java | 55 ---- .../implementation/ModelToVLogConverter.java | 14 +- .../reasoner/implementation/VLogReasoner.java | 287 +++++++++++++----- .../LoadDataFromMemoryTest.java | 1 - .../ModelToVLogConverterTest.java | 68 +++-- .../reasoner/implementation/ReasonerTest.java | 12 - 6 files changed, 268 insertions(+), 169 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 69d3d4f9c..64c6a955d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -12,10 +12,8 @@ import java.util.Set; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; @@ -284,59 +282,6 @@ void addFact(Fact fact) { this.factsByPredicate.get(predicate).add(fact); } - @Deprecated - public boolean hasFacts() { - // If needed, a more elegant implementation should be used - return !this.getFacts().isEmpty() || !this.getDataSourceDeclarations().isEmpty(); - } - - @Deprecated - public Map getDataSourceForPredicate() { - // Only for temporary functionality; the one-source-per-predicate model will be - // retired and is no longer enforced in the knowledge base - final Map result = new HashMap<>(); - for (final DataSourceDeclaration dsd : getDataSourceDeclarations()) { - result.put(dsd.getPredicate(), dsd.getDataSource()); - } - return result; - } - - @Deprecated - public Map> getFactsForPredicate() { - // Check if this is really the best format to access this data - return this.factsByPredicate; - } - - @Deprecated - public Set getEdbPredicates() { - // TODO use cache - return collectEdbPredicates(); - } - - @Deprecated - public Set getIdbPredicates() { - // TODO use cache - return collectIdbPredicates(); - } - - Set collectEdbPredicates() { - // not an efficient or elegant implementation - final Set edbPredicates = new HashSet<>(); - edbPredicates.addAll(this.getDataSourceForPredicate().keySet()); - edbPredicates.addAll(this.factsByPredicate.keySet()); - return edbPredicates; - } - - Set collectIdbPredicates() { - final Set idbPredicates = new HashSet<>(); - for (final Rule rule : this.getRules()) { - for (final Literal headAtom : rule.getHead()) { - idbPredicates.add(headAtom.getPredicate()); - } - } - return idbPredicates; - } - /** * Returns all {@link Statement}s of this knowledge base. 
* diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java index 298a8f24c..7ca9246ae 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java @@ -20,12 +20,11 @@ * #L% */ - - import java.util.Collection; import java.util.List; import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -34,7 +33,6 @@ import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; - /** * Utility class with static methods for converting from VLog API model objects * ({@code org.semanticweb.vlog4j.core.model}) to internal VLog model objects @@ -65,7 +63,7 @@ static karmaresearch.vlog.Term[] toVLogTermArray(final List terms) { return vLogTerms; } - static String[][] toVLogFactTuples(final Collection facts) { + static String[][] toVLogFactTuples(final Collection facts) { final String[][] tuples = new String[facts.size()][]; int i = 0; for (final PositiveLiteral atom : facts) { @@ -91,8 +89,7 @@ static String[] toVLogFactTuple(final PositiveLiteral fact) { /** * Internal String representation that uniquely identifies a {@link Predicate}. * - * @param predicate - * a {@link Predicate} + * @param predicate a {@link Predicate} * @return String representation corresponding to given predicate name and * arity. 
*/ @@ -104,7 +101,8 @@ static String toVLogPredicate(Predicate predicate) { static karmaresearch.vlog.Atom toVLogAtom(final Literal literal) { final karmaresearch.vlog.Term[] vLogTerms = toVLogTermArray(literal.getTerms()); final String vLogPredicate = toVLogPredicate(literal.getPredicate()); - final karmaresearch.vlog.Atom vLogAtom = new karmaresearch.vlog.Atom(vLogPredicate,literal.isNegated(), vLogTerms); + final karmaresearch.vlog.Atom vLogAtom = new karmaresearch.vlog.Atom(vLogPredicate, literal.isNegated(), + vLogTerms); return vLogAtom; } @@ -124,7 +122,7 @@ static karmaresearch.vlog.Rule toVLogRule(final Rule rule) { return new karmaresearch.vlog.Rule(vLogHead, vLogBody); } - static karmaresearch.vlog.Rule[] toVLogRuleArray(final List rules) { + static karmaresearch.vlog.Rule[] toVLogRuleArray(final Collection rules) { final karmaresearch.vlog.Rule[] vLogRules = new karmaresearch.vlog.Rule[rules.size()]; int i = 0; for (final Rule rule : rules) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 14d4c9798..3d9783b1e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,8 +1,12 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Formatter; +import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; @@ -12,10 +16,19 @@ import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.model.implementation.VariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; @@ -57,6 +70,7 @@ */ public class VLogReasoner implements Reasoner { + private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); /** * Dummy data source declaration for predicates for which we have explicit local @@ -107,11 +121,113 @@ public boolean equals(Object obj) { } - private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); + /** + * Local visitor implementation for processing statements upon loading. Internal + * index structures are updated based on the statements that are detected. 
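The loading rewrite below is organised around the StatementVisitor interface: every Statement of the knowledge base is dispatched through accept, and the visitor updates the loader's index structures. A reduced sketch of the same dispatch pattern, without any VLog-specific indexing (the Void type parameter is an assumption, inferred from the visit methods returning null in these patches; KnowledgeBase is iterable since the "Make KB iterable" patch above):

```
// Sketch: counting statements by kind with a StatementVisitor, mirroring the
// dispatch style of the loading visitor but without any loading logic.
import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration;
import org.semanticweb.vlog4j.core.model.api.Fact;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.model.api.Statement;
import org.semanticweb.vlog4j.core.model.api.StatementVisitor;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;

public class StatementCounter implements StatementVisitor<Void> {
	int facts = 0;
	int rules = 0;
	int dataSourceDeclarations = 0;

	@Override
	public Void visit(Fact statement) {
		this.facts++;
		return null;
	}

	@Override
	public Void visit(Rule statement) {
		this.rules++;
		return null;
	}

	@Override
	public Void visit(DataSourceDeclaration statement) {
		this.dataSourceDeclarations++;
		return null;
	}

	static void count(KnowledgeBase knowledgeBase) {
		final StatementCounter counter = new StatementCounter();
		for (final Statement statement : knowledgeBase) {
			statement.accept(counter);
		}
		System.out.printf("%d facts, %d rules, %d source declarations%n",
				counter.facts, counter.rules, counter.dataSourceDeclarations);
	}
}
```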
+ * + * @author Markus Kroetzsch + * + */ + class LoadKbVisitor implements StatementVisitor { + + public void clearIndexes() { + edbPredicates.clear(); + idbPredicates.clear(); + aliasedEdbPredicates.clear(); + aliasesForEdbPredicates.clear(); + directEdbFacts.clear(); + rules.clear(); + } - private final KnowledgeBase knowledgeBase; + @Override + public Void visit(Fact statement) { + Predicate predicate = statement.getPredicate(); + registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); + if (!directEdbFacts.containsKey(predicate)) { + List facts = new ArrayList(); + facts.add(statement); + directEdbFacts.put(predicate, facts); + } else { + directEdbFacts.get(predicate).add(statement); + } + return null; + } + + @Override + public Void visit(Rule statement) { + rules.add(statement); + for (PositiveLiteral positiveLiteral : statement.getHead()) { + Predicate predicate = positiveLiteral.getPredicate(); + if (!idbPredicates.contains(predicate)) { + if (edbPredicates.containsKey(predicate)) { + addEdbAlias(edbPredicates.get(predicate)); + edbPredicates.remove(predicate); + } + idbPredicates.add(predicate); + } + } + return null; + } + + @Override + public Void visit(DataSourceDeclaration statement) { + registerEdbDeclaration(statement); + return null; + } + + void registerEdbDeclaration(DataSourceDeclaration dataSourceDeclaration) { + Predicate predicate = dataSourceDeclaration.getPredicate(); + if (idbPredicates.contains(predicate) || aliasedEdbPredicates.contains(predicate)) { + if (!aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { + addEdbAlias(dataSourceDeclaration); + } + } else { + DataSourceDeclaration currentMainDeclaration = edbPredicates.get(predicate); + if (currentMainDeclaration == null) { + edbPredicates.put(predicate, dataSourceDeclaration); + } else if (!(currentMainDeclaration.equals(dataSourceDeclaration))) { + addEdbAlias(currentMainDeclaration); + addEdbAlias(dataSourceDeclaration); + edbPredicates.remove(predicate); + } // else: predicate already known to have local facts (only) + } + } + + void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + Predicate aliasPredicate; + if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { + aliasPredicate = new PredicateImpl(predicate.getName() + "_FACT", predicate.getArity()); + } else { + aliasPredicate = new PredicateImpl(predicate.getName() + "_" + predicate.hashCode(), + predicate.getArity()); + } + aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); + aliasedEdbPredicates.add(predicate); + + final List terms = new ArrayList<>(); + for (int i = 1; i <= predicate.getArity(); i++) { + terms.add(new VariableImpl("X" + i)); + } + final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); + final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); + final Rule rule = new RuleImpl(new ConjunctionImpl(Arrays.asList(head)), + new ConjunctionImpl(Arrays.asList(body))); + rules.add(rule); + } + + } + + final KnowledgeBase knowledgeBase; + final VLog vLog = new VLog(); + + final Map aliasesForEdbPredicates = new HashMap<>(); + final Set idbPredicates = new HashSet<>(); + final Map edbPredicates = new HashMap<>(); + final Set aliasedEdbPredicates = new HashSet<>(); + final Map> directEdbFacts = new HashMap<>(); + final Set rules = new HashSet<>(); - private final VLog vLog = new VLog(); private ReasonerState reasonerState = ReasonerState.BEFORE_LOADING; private 
MaterialisationState materialisationState = MaterialisationState.INCOMPLETE; @@ -186,85 +302,115 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { @Override public void load() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { - if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { + if (this.reasonerState == ReasonerState.AFTER_CLOSING) { throw new ReasonerStateException(this.reasonerState, "Loading is not allowed after closing."); } - if (this.reasonerState != ReasonerState.BEFORE_LOADING) { - // TODO check if this is correct. - LOGGER.warn("This method call is ineffective: the Reasoner has already been loaded."); - } else { - validateEdbIdbSeparation(); - this.reasonerState = ReasonerState.AFTER_LOADING; - - if (!this.knowledgeBase.hasFacts()) { - LOGGER.warn("No facts have been provided."); - } + LoadKbVisitor visitor = new LoadKbVisitor(); + visitor.clearIndexes(); + for (Statement statement : knowledgeBase) { + statement.accept(visitor); + } +// System.out.println("\nEDB: " + edbPredicates); +// System.out.println("\nIDB: " + idbPredicates); +// System.out.println("\nEDB with aliases: " + aliasedEdbPredicates); +// System.out.println("\nAliases for EDBs: " + aliasesForEdbPredicates); - try { - this.vLog.start(generateDataSourcesConfig(), false); - } catch (final AlreadyStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); - } + if (edbPredicates.isEmpty() && aliasedEdbPredicates.isEmpty()) { + LOGGER.warn("No facts have been provided."); + } - validateDataSourcePredicateArities(); + try { + this.vLog.start(getDataSourceConfigurationString(), false); + } catch (final AlreadyStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration.", e); + } + // TODO: can't we set this earlier? Why here? 
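The aliasing performed by addEdbAlias above is easiest to see on a concrete case: if a predicate q of arity 2 receives facts from a data source but also occurs in a rule head, the source is re-attached to a fresh alias predicate, and a generated bridge rule restores the original predicate. A sketch of that bridge rule, built with the Expressions factory used in the tests (the alias name here is schematic; the real code derives it from the predicate's hash code, or uses the "_FACT" suffix for in-memory facts):

```
// Sketch of the bridge rule generated for an aliased EDB predicate q of arity 2:
// q(X1, X2) :- q_alias(X1, X2)
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.model.api.Variable;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;

public class AliasRuleSketch {
	public static void main(String[] args) {
		final Variable x1 = Expressions.makeVariable("X1");
		final Variable x2 = Expressions.makeVariable("X2");
		final Rule bridge = Expressions.makeRule(
				Expressions.makePositiveLiteral("q", x1, x2),       // head: original predicate
				Expressions.makePositiveLiteral("q_alias", x1, x2)); // body: alias fed by the source
		System.out.println(bridge);
	}
}
```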
+ setLogLevel(this.internalLogLevel); - loadInMemoryFacts(); + validateDataSourcePredicateArities(); - if (this.knowledgeBase.getRules().isEmpty()) { - LOGGER.warn("No rules have been provided for reasoning."); - } else { - loadRules(); - } + loadFacts(); + loadRules(); - setLogLevel(this.internalLogLevel); - } + this.reasonerState = ReasonerState.AFTER_LOADING; } - String generateDataSourcesConfig() { + String getDataSourceConfigurationString() { final StringBuilder configStringBuilder = new StringBuilder(); + final Formatter formatter = new Formatter(configStringBuilder); int dataSourceIndex = 0; - for (final Predicate predicate : this.knowledgeBase.getDataSourceForPredicate().keySet()) { - final DataSource dataSource = this.knowledgeBase.getDataSourceForPredicate().get(predicate); - try (final Formatter formatter = new Formatter(configStringBuilder);) { - formatter.format(dataSource.toConfigString(), dataSourceIndex, + for (final Predicate predicate : this.edbPredicates.keySet()) { + final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); + if (dataSourceDeclaration.getDataSource() != null) { + formatter.format(dataSourceDeclaration.getDataSource().toConfigString(), dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); } dataSourceIndex++; } - return configStringBuilder.toString(); - } - - private void validateEdbIdbSeparation() throws EdbIdbSeparationException { - final Set edbPredicates = this.knowledgeBase.getEdbPredicates(); - final Set idbPredicates = this.knowledgeBase.getIdbPredicates(); - final Set intersection = new HashSet<>(edbPredicates); - intersection.retainAll(idbPredicates); - if (!intersection.isEmpty()) { - throw new EdbIdbSeparationException(intersection); + for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { + Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); + if (dataSourceDeclaration.getDataSource() != null) { + formatter.format(dataSourceDeclaration.getDataSource().toConfigString(), dataSourceIndex, + ModelToVLogConverter.toVLogPredicate(aliasPredicate)); + } + dataSourceIndex++; } + formatter.close(); + return configStringBuilder.toString(); } - private void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { - final Map dataSourceForPredicate = this.knowledgeBase.getDataSourceForPredicate(); - for (final Predicate predicate : dataSourceForPredicate.keySet()) { - final int dataSourcePredicateArity; - try { - dataSourcePredicateArity = this.vLog.getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } +// String generateDataSourcesConfig() { +// final StringBuilder configStringBuilder = new StringBuilder(); +// int dataSourceIndex = 0; +// for (final Predicate predicate : this.knowledgeBase.getDataSourceForPredicate().keySet()) { +// final DataSource dataSource = this.knowledgeBase.getDataSourceForPredicate().get(predicate); +// try (final Formatter formatter = new Formatter(configStringBuilder)) { +// formatter.format(dataSource.toConfigString(), dataSourceIndex, +// ModelToVLogConverter.toVLogPredicate(predicate)); +// } +// dataSourceIndex++; +// } +// return configStringBuilder.toString(); +// } + +// private void validateEdbIdbSeparation() throws EdbIdbSeparationException { +// final Set edbPredicates = this.knowledgeBase.getEdbPredicates(); +// final Set idbPredicates = 
this.knowledgeBase.getIdbPredicates(); +// final Set intersection = new HashSet<>(edbPredicates); +// intersection.retainAll(idbPredicates); +// if (!intersection.isEmpty()) { +// throw new EdbIdbSeparationException(intersection); +// } +// } + + void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { + for (final Predicate predicate : edbPredicates.keySet()) { + validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); + } + for (final DataSourceDeclaration dataSourceDeclaration : aliasesForEdbPredicates.keySet()) { + validateDataSourcePredicateArity(aliasesForEdbPredicates.get(dataSourceDeclaration), + dataSourceDeclaration.getDataSource()); + } + } + + void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) + throws IncompatiblePredicateArityException { + if (dataSource == null) + return; + try { + final int dataSourcePredicateArity = this.vLog + .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); if (dataSourcePredicateArity == -1) { - LOGGER.warn("Data source {} for predicate {} is empty: ", dataSourceForPredicate.get(predicate), - predicate); + LOGGER.warn("Data source {} for predicate {} is empty: ", dataSource, predicate); } else if (predicate.getArity() != dataSourcePredicateArity) { - throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, - dataSourceForPredicate.get(predicate)); + throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); } - } @Override @@ -379,24 +525,25 @@ public void close() { this.vLog.stop(); } - private void loadInMemoryFacts() { - final Map> factsForPredicate = this.knowledgeBase.getFactsForPredicate(); - for (final Predicate predicate : factsForPredicate.keySet()) { - final Set facts = factsForPredicate.get(predicate); - - final String vLogPredicate = ModelToVLogConverter.toVLogPredicate(predicate); - final String[][] tuplesForPredicate = ModelToVLogConverter.toVLogFactTuples(facts); + void loadFacts() { + for (final Predicate predicate : directEdbFacts.keySet()) { + Predicate aliasPredicate; + if (edbPredicates.containsKey(predicate)) { + aliasPredicate = predicate; + } else { + aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + } try { - this.vLog.addData(vLogPredicate, tuplesForPredicate); + this.vLog.addData(ModelToVLogConverter.toVLogPredicate(aliasPredicate), + ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate))); } catch (final EDBConfigurationException e) { throw new RuntimeException("Invalid data sources configuration.", e); } } } - private void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter - .toVLogRuleArray(this.knowledgeBase.getRules()); + void loadRules() { + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); try { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java index 92ebbab92..75af663b4 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java @@ -30,7 +30,6 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java index c928271df..44c4544f2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -32,6 +32,7 @@ import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Blank; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -46,7 +47,8 @@ public class ModelToVLogConverterTest { @Test public void testToVLogTermVariable() { final Variable variable = Expressions.makeVariable("var"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "var"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.VARIABLE, "var"); final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(variable); @@ -59,7 +61,8 @@ public void testToVLogTermVariable() { @Test public void testToVLogTermConstant() { final Constant constant = Expressions.makeConstant("const"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "const"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, "const"); final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); @@ -73,7 +76,8 @@ public void testToVLogTermConstant() { @Test public void testToVLogTermBlank() { final Blank blank = new BlankImpl("blank"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "blank"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.BLANK, "blank"); final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(blank); @@ -92,11 +96,16 @@ public void testToVLogTermArray() { final Blank bx = new BlankImpl("x"); final List terms = Arrays.asList(vx, cx, vxToo, bx, vy); - final karmaresearch.vlog.Term expectedVx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "x"); - final karmaresearch.vlog.Term expectedVy = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "y"); - final karmaresearch.vlog.Term expectedCx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "x"); - final karmaresearch.vlog.Term expectedBx = new 
karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "x"); - final karmaresearch.vlog.Term[] expectedTermArray = { expectedVx, expectedCx, expectedVx, expectedBx, expectedVy }; + final karmaresearch.vlog.Term expectedVx = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.VARIABLE, "x"); + final karmaresearch.vlog.Term expectedVy = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.VARIABLE, "y"); + final karmaresearch.vlog.Term expectedCx = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, "x"); + final karmaresearch.vlog.Term expectedBx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, + "x"); + final karmaresearch.vlog.Term[] expectedTermArray = { expectedVx, expectedCx, expectedVx, expectedBx, + expectedVy }; final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms); assertArrayEquals(expectedTermArray, vLogTermArray); @@ -116,8 +125,8 @@ public void testToVLogFactTuples() { final Constant c1 = Expressions.makeConstant("1"); final Constant c2 = Expressions.makeConstant("2"); final Constant c3 = Expressions.makeConstant("3"); - final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p1", c1); - final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p2", c2, c3); + final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(c1)); + final Fact atom2 = Expressions.makeFact("p2", Arrays.asList(c2, c3)); final String[][] vLogTuples = ModelToVLogConverter.toVLogFactTuples(Arrays.asList(atom1, atom2)); @@ -139,9 +148,12 @@ public void testToVLogAtom() { final Blank b = new BlankImpl("_:b"); final PositiveLiteral atom = Expressions.makePositiveLiteral("pred", c, x, b); - final karmaresearch.vlog.Term expectedC = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"); - final karmaresearch.vlog.Term expectedX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "x"); - final karmaresearch.vlog.Term expectedB = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_:b"); + final karmaresearch.vlog.Term expectedC = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + "c"); + final karmaresearch.vlog.Term expectedX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "x"); + final karmaresearch.vlog.Term expectedB = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, + "_:b"); final String expectedPredicateName = "pred" + ModelToVLogConverter.PREDICATE_ARITY_SUFFIX_SEPARATOR + 3; final karmaresearch.vlog.Term[] expectedTerms = { expectedC, expectedX, expectedB }; @@ -168,31 +180,41 @@ public void testToVLogRuleArray() { final PositiveLiteral atomQ2XV = Expressions.makePositiveLiteral("q2", x, v); final Rule rule2 = Expressions.makeRule(atomQ2XV, atomQ1XWZ, atomQYW, atomQXYZ); - final karmaresearch.vlog.Term expX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "x"); - final karmaresearch.vlog.Term expY = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "y"); - final karmaresearch.vlog.Term expZ = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "z"); - final karmaresearch.vlog.Term expW = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "w"); - final karmaresearch.vlog.Term expV = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "v"); + final karmaresearch.vlog.Term expX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + 
"x"); + final karmaresearch.vlog.Term expY = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "y"); + final karmaresearch.vlog.Term expZ = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "z"); + final karmaresearch.vlog.Term expW = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "w"); + final karmaresearch.vlog.Term expV = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "v"); final karmaresearch.vlog.Atom expAtomP1X = new karmaresearch.vlog.Atom("p1-1", expX); final karmaresearch.vlog.Atom expAtomP2XY = new karmaresearch.vlog.Atom("p2-2", expX, expY); final karmaresearch.vlog.Atom expAtomP3YZ = new karmaresearch.vlog.Atom("p3-2", expY, expZ); - final karmaresearch.vlog.Rule expectedRule1 = new karmaresearch.vlog.Rule(new karmaresearch.vlog.Atom[] { expAtomP1X }, + final karmaresearch.vlog.Rule expectedRule1 = new karmaresearch.vlog.Rule( + new karmaresearch.vlog.Atom[] { expAtomP1X }, new karmaresearch.vlog.Atom[] { expAtomP2XY, expAtomP3YZ }); final karmaresearch.vlog.Atom expAtomQXYZ = new karmaresearch.vlog.Atom("q-3", expX, expY, expZ); final karmaresearch.vlog.Atom expAtomQYW = new karmaresearch.vlog.Atom("q-2", expY, expW); final karmaresearch.vlog.Atom expAtomQ1XWZ = new karmaresearch.vlog.Atom("q1-3", expX, expW, expZ); final karmaresearch.vlog.Atom expAtomQ2XV = new karmaresearch.vlog.Atom("q2-2", expX, expV); - final karmaresearch.vlog.Rule expectedRule2 = new karmaresearch.vlog.Rule(new karmaresearch.vlog.Atom[] { expAtomQ2XV }, + final karmaresearch.vlog.Rule expectedRule2 = new karmaresearch.vlog.Rule( + new karmaresearch.vlog.Atom[] { expAtomQ2XV }, new karmaresearch.vlog.Atom[] { expAtomQ1XWZ, expAtomQYW, expAtomQXYZ }); - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(Arrays.asList(rule1, rule2)); - final karmaresearch.vlog.Rule[] expectedRuleArray = new karmaresearch.vlog.Rule[] { expectedRule1, expectedRule2 }; + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter + .toVLogRuleArray(Arrays.asList(rule1, rule2)); + final karmaresearch.vlog.Rule[] expectedRuleArray = new karmaresearch.vlog.Rule[] { expectedRule1, + expectedRule2 }; assertArrayEquals(expectedRuleArray, vLogRuleArray); } @Test public void testVLogRuleRewritingStrategy() { - assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.NONE, ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.NONE)); + assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.NONE, + ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.NONE)); assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.AGGRESSIVE, ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES)); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java index 1067715ef..ff3a60ae6 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java @@ -2,7 +2,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; /* * #%L @@ -111,15 +110,4 @@ public void testSimpleInference() throws EDBConfigurationException, IOException, } } - // TODO move to a test class for KnowledgeBase - @Test - 
public void testGenerateDataSourcesConfigEmpty() throws ReasonerStateException, IOException { - try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { - final String dataSourcesConfig = reasoner.generateDataSourcesConfig(); - assertTrue(dataSourcesConfig.isEmpty()); - - } - - } - } From 55daa68569781ccdb40c279c36c69d7dc5e6c966 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Wed, 21 Aug 2019 17:15:33 +0200 Subject: [PATCH 0145/1003] handle NonExistingPredicateException --- .../vlog4j/core/reasoner/Reasoner.java | 16 ++-- .../vlog4j/core/reasoner/ReasonerState.java | 26 +++--- .../reasoner/implementation/VLogReasoner.java | 86 +++++++++++-------- .../implementation/AnswerQueryTest.java | 39 ++++++--- .../ExportQueryAnswersToCsvFileTest.java | 66 ++++++++++++-- .../LoadDataFromCsvFileTest.java | 4 + .../LoadDataFromMemoryTest.java | 1 - .../LoadDataFromRdfFileTest.java | 5 ++ .../LoadDataFromSparqlQueryTest.java | 2 + .../implementation/ReasonerStateTest.java | 15 ---- .../vlog/ExportQueryResultToCsvFileTest.java | 5 +- .../core/reasoner/vlog/LargeAritiesTest.java | 21 +++-- .../reasoner/vlog/StratifiedNegationTest.java | 7 +- .../vlog/VLogDataFromCsvFileTest.java | 37 ++++++-- .../reasoner/vlog/VLogDataFromMemoryTest.java | 76 +++++++--------- .../vlog/VLogDataFromRdfFileTest.java | 39 ++++++--- .../core/reasoner/vlog/VLogQueryTest.java | 5 +- .../core/reasoner/vlog/VLogTermNamesTest.java | 10 +-- 18 files changed, 283 insertions(+), 177 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 93435ffb2..7a4236f98 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -89,7 +89,7 @@ * */ -public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { +public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { /** * Factory method that to instantiate a Reasoner with an empty knowledge base. @@ -399,13 +399,13 @@ boolean reason() * @throws ReasonerStateException if this method is called before loading * ({@link Reasoner#load()} or after closing * ({@link Reasoner#close()}). - * @throws IllegalArgumentException if the given {@code queryAtom} contains - * terms ({@link Atom#getTerms()}) which are - * not of type {@link TermType#CONSTANT} or + * + * @throws IllegalArgumentException if the given {@code query} contains terms + * ({@link Atom#getTerms()}) which are not of + * type {@link TermType#CONSTANT} or * {@link TermType#VARIABLE}. */ - QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) - throws ReasonerStateException; + QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) throws ReasonerStateException; // TODO add examples to query javadoc /** @@ -461,11 +461,9 @@ QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) * */ // TODO update javadoc with return type - MaterialisationState exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, - boolean includeBlanks) + MaterialisationState exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeBlanks) throws ReasonerStateException, IOException; - /** * Resets the reasoner to a pre-loading state (before the call of * {@link #load()} method). All facts inferred by reasoning are discarded. 
Rules diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java index 19305ab50..4dbf79176 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java @@ -37,26 +37,28 @@ public enum ReasonerState { /** * State a Reasoner is in after method {@link Reasoner#load()} has been called, * and before method {@link Reasoner#reason()} has been called. The Reasoner can - * only be loaded once. Loading in this state is ineffective. Adding rules, fact - * and fact data sources and setting the rule re-writing strategy are not - * allowed in this state. */ AFTER_LOADING("loaded"), /** * State a Reasoner is in after method {@link Reasoner#reason()} has been - * called. The Reasoner cannot reason again, once it reached this state. Loading - * and setting the reasoning algorithm this state are ineffective. Reasoning, - * adding rules, fact and fact data sources and setting the rule re-writing - * strategy are not allowed in this state. */ - AFTER_REASONING("completed reasoning"), + AFTER_REASONING("after reasoning"), + + /** + * State in which the knowledge base of an already loaded reasoner has been + * changed. This can occur if the knowledge base has been modified after loading + * (in {@link ReasonerState#AFTER_LOADING} state), or after reasoning (in + * {@link ReasonerState#AFTER_REASONING} state). + */ + + KNOWLEDGE_BASE_CHANGED("knowledge base changed"), /** * State a Reasoner is in after method {@link Reasoner#close()} has been called. - * The Reasoner cannot reason again, once it reached this state. - * Loading and setting the reasoning algorithm in this state are ineffective. - * Reasoning, adding rules, fact and fact data sources and setting the rule re-writing - * strategy are not allowed in this state. + * The Reasoner cannot reason again, once it reached this state. Loading and + * setting the reasoning algorithm in this state are ineffective. Reasoning, + * adding rules, fact and fact data sources and setting the rule re-writing + * strategy are not allowed in this state. */ AFTER_CLOSING("closed"); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 89840f16c..ae27fa0e7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,6 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; import java.io.IOException; +import java.text.MessageFormat; import java.util.Formatter; import java.util.HashSet; import java.util.Map; @@ -29,6 +30,7 @@ import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.EDBConfigurationException; import karmaresearch.vlog.MaterializationException; +import karmaresearch.vlog.NonExistingPredicateException; import karmaresearch.vlog.NotStartedException; import karmaresearch.vlog.TermQueryResultIterator; import karmaresearch.vlog.VLog; @@ -225,13 +227,11 @@ public boolean reason() runChase(); break; case AFTER_LOADING: - // TODO check if changes occurred in the KB. If yes, only runChase(); otherwise, - // reset and reload. 
runChase(); break; + + case KNOWLEDGE_BASE_CHANGED: case AFTER_REASONING: - // TODO check if changes occurred in the KB. If yes, reset, reload, and run - // chase. If not, do nothing. resetReasoner(); load(); runChase(); @@ -282,6 +282,9 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + throw new IllegalArgumentException(MessageFormat.format( + "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); } return new QueryResultIterator(stringQueryResultIterator, this.materialisationState); @@ -289,7 +292,8 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla @Override public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, - final boolean includeBlanks) throws ReasonerStateException, IOException { + final boolean includeBlanks) + throws ReasonerStateException, IOException { final boolean filterBlanks = !includeBlanks; if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); @@ -305,6 +309,9 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + throw new IllegalArgumentException(MessageFormat.format( + "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); } return this.materialisationState; } @@ -442,46 +449,53 @@ public CyclicityResult checkForCycles() throws ReasonerStateException, NotStarte @Override public void onStatementsAdded(Set statementsAdded) { - updateMaterialisationStateOnStatementsAdded(statementsAddedInvalidateMaterialisation(statementsAdded)); + // TODO more elaborate materialisation state handling + // updateReasonerStateToKnowledgeBaseChanged(); + // updateMaterialisationStateOnStatementsAdded(statementsAddedInvalidateMaterialisation(statementsAdded)); + + updateReasonerToKnowledgeBaseChanged(); } @Override public void onStatementAdded(Statement statementAdded) { - updateMaterialisationStateOnStatementsAdded(statementAddedInvalidatesMaterialisation(statementAdded)); - } - - private boolean statementsAddedInvalidateMaterialisation(Set statementsAdded) { - // if statements contain Facts or DataSourceDeclarations for predicates that - // appear as negated in rules, return true - // TODO implement - return false; - - } + // TODO more elaborate materialisation state handling + // updateReasonerStateToKnowledgeBaseChanged(); + // updateMaterialisationStateOnStatementsAdded(statementAddedInvalidatesMaterialisation(statementAdded)); - private boolean statementAddedInvalidatesMaterialisation(Statement statementAdded) { - // if statement is a Facts or a DataSourceDeclarations for predicates that - // appear as negated in rules, return true - // TODO implement - return false; + updateReasonerToKnowledgeBaseChanged(); } - private void updateMaterialisationStateOnStatementsAdded(boolean materialisationInvalidated) { - switch (materialisationState) { - case WRONG: - // added statements do not 
change the WRONG state - break; + private void updateReasonerToKnowledgeBaseChanged() { + if (this.reasonerState.equals(ReasonerState.AFTER_LOADING) + || this.reasonerState.equals(ReasonerState.AFTER_REASONING)) { - case INCOMPLETE: - case COMPLETE: - if (materialisationInvalidated) { - this.materialisationState = materialisationInvalidated ? MaterialisationState.WRONG - : MaterialisationState.INCOMPLETE; - } - break; - - default: - break; + this.reasonerState = ReasonerState.KNOWLEDGE_BASE_CHANGED; + this.materialisationState = MaterialisationState.WRONG; } } +// private void updateReasonerStateToKnowledgeBaseChanged() { +// if (this.reasonerState.equals(ReasonerState.AFTER_LOADING) +// || this.reasonerState.equals(ReasonerState.AFTER_REASONING)) { +// this.reasonerState = ReasonerState.KNOWLEDGE_BASE_CHANGED; +// } +// } + +// private boolean statementsAddedInvalidateMaterialisation(Set statementsAdded) { +// // TODO implement and use to decide materialisation state +// return true; +// +// } +// +// private boolean statementAddedInvalidatesMaterialisation(Statement statementAdded) { +// // TODO implement and use to decide materialisation state +// return true; +// } + +// private void updateMaterialisationStateOnStatementsAdded(boolean materialisationInvalidated) { +// if (this.reasonerState.equals(ReasonerState.KNOWLEDGE_BASE_CHANGED) && materialisationInvalidated) { +// this.materialisationState = MaterialisationState.WRONG; +// } +// } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index ecd23c64e..030e6db11 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -250,8 +250,8 @@ public void queryResultWithBlanks() } } - @Test - public void queryEmptyKnowledgeBase() + @Test(expected = IllegalArgumentException.class) + public void queryEmptyKnowledgeBaseBeforeReasoning() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { final KnowledgeBase kb = new KnowledgeBase(); @@ -259,15 +259,22 @@ public void queryEmptyKnowledgeBase() reasoner.load(); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); - final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true); - Assert.assertFalse(queryResultIterator.hasNext()); - queryResultIterator.close(); + reasoner.answerQuery(queryAtom, true); + } + } + + @Test(expected = IllegalArgumentException.class) + public void queryEmptyKnowledgeBaseAfterReasoning() + throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + final KnowledgeBase kb = new KnowledgeBase(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); reasoner.reason(); - try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom, true)) { - assertFalse(queryResultIteratorAfterReason.hasNext()); - } + final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); + reasoner.answerQuery(queryAtom, true); } } @@ -307,15 +314,25 @@ public void queryEmptyFacts() throws EDBConfigurationException, IOException, Edb try (final VLogReasoner reasoner = new VLogReasoner(kb)) { 
reasoner.load(); - final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true)) { + final PositiveLiteral queryAtom1 = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x")); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom1, true)) { + Assert.assertFalse(queryResultIterator.hasNext()); + queryResultIterator.close(); + } + + final PositiveLiteral queryAtom2 = Expressions.makePositiveLiteral("q", Expressions.makeVariable("?x")); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom2, true)) { Assert.assertFalse(queryResultIterator.hasNext()); queryResultIterator.close(); } reasoner.reason(); - try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom, true)) { + try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom1, true)) { + assertFalse(queryResultIteratorAfterReason.hasNext()); + } + + try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom2, true)) { assertFalse(queryResultIteratorAfterReason.hasNext()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java index 189c2748d..8c63a0f8e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java @@ -54,7 +54,7 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() final boolean includeBlanks = false; // final String csvFilePath = CSV_EXPORT_FOLDER + "output"; final List> factCCD = Arrays.asList(Arrays.asList("c", "c", "d")); - + final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(fact); @@ -74,13 +74,13 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() final List> csvContentXXZ = FileDataSourceTestUtils.getCSVContent(csvFilePathXXZ); assertEquals(factCCD, csvContentXXZ); - final PositiveLiteral queryAtomXXX = Expressions.makePositiveLiteral("q", x, x, x); + final PositiveLiteral queryAtomXXX = Expressions.makePositiveLiteral(predicate, x, x, x); final String csvFilePathXXX = FileDataSourceTestUtils.OUTPUT_FOLDER + "outputXXX.csv"; reasoner.exportQueryAnswersToCsv(queryAtomXXX, csvFilePathXXX, includeBlanks); final List> csvContentXXX = FileDataSourceTestUtils.getCSVContent(csvFilePathXXX); assertTrue(csvContentXXX.isEmpty()); - final PositiveLiteral queryAtomXYX = Expressions.makePositiveLiteral("q", x, y, x); + final PositiveLiteral queryAtomXYX = Expressions.makePositiveLiteral(predicate, x, y, x); final String csvFilePathXYX = FileDataSourceTestUtils.OUTPUT_FOLDER + "outputXYX.csv"; reasoner.exportQueryAnswersToCsv(queryAtomXYX, csvFilePathXYX, includeBlanks); final List> csvContentXYX = FileDataSourceTestUtils.getCSVContent(csvFilePathXYX); @@ -89,22 +89,72 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() } - @Test - public void testExportQueryEmptyKnowledgeBase() + @Test(expected = IllegalArgumentException.class) + public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() throws EdbIdbSeparationException, IOException, ReasonerStateException, 
IncompatiblePredicateArityException { + final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); - + final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, true); - assertTrue(FileDataSourceTestUtils.getCSVContent(emptyFilePath).isEmpty()); + } + } + + @Test(expected = IllegalArgumentException.class) + public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() + throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + + final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), + Expressions.makeVariable("?y")); + + final KnowledgeBase kb = new KnowledgeBase(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; + + reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, false); + } + } + @Test(expected = IllegalArgumentException.class) + public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() + throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + + final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), + Expressions.makeVariable("?y")); + + final KnowledgeBase kb = new KnowledgeBase(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + + final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; + reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, true); + } + } + + @Test(expected = IllegalArgumentException.class) + public void testExportQueryEmptyKnowledgeBaseAfterReasoningExcludeBlanks() + throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + + final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), + Expressions.makeVariable("?y")); + + final KnowledgeBase kb = new KnowledgeBase(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + + final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, false); - assertTrue(FileDataSourceTestUtils.getCSVContent(emptyFilePath).isEmpty()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java index 9b0f5af85..b96d868a2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java @@ -31,6 +31,7 @@ import java.util.List; import java.util.Set; +import org.junit.Ignore; import org.junit.Test; import org.mockito.internal.util.collections.Sets; import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; @@ -54,6 +55,9 @@ public class LoadDataFromCsvFileTest { private static final Set> expectedUnaryQueryResult = 
Sets.newSet(Arrays.asList(makeConstant("c1")), Arrays.asList(makeConstant("c2"))); + @Ignore + // FIXME: test ignored because of a bug in VLog. Remove the @Ignore annotation + // after bug is fixed. @Test public void testLoadEmptyCsvFile() throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java index 92ebbab92..75af663b4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java @@ -30,7 +30,6 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java index dd2a510c0..c26a6ab22 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java @@ -32,6 +32,7 @@ import java.util.List; import java.util.Set; +import org.junit.Ignore; import org.junit.Test; import org.mockito.internal.util.collections.Sets; import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; @@ -59,6 +60,8 @@ public class LoadDataFromRdfFileTest { Arrays.asList(makeConstant("http://example.org/c1"), makeConstant("http://example.org/q"), makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); + @Ignore + //TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation after VLog bug is fixed. @Test public void testLoadEmptyRdfFile() throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt"))); } + @Ignore + //TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation after VLog bug is fixed.
@Test public void testLoadEmptyRdfFileGz() throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java index 388f07d4f..0b1607042 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java @@ -49,6 +49,7 @@ public class LoadDataFromSparqlQueryTest { * @throws EdbIdbSeparationException * @throws IOException * @throws IncompatiblePredicateArityException + * @throws QueryPredicateNonExistentException */ @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test @@ -109,6 +110,7 @@ public void testSimpleSparqlQueryHttps() * @throws EdbIdbSeparationException * @throws IOException * @throws IncompatiblePredicateArityException + * @throws QueryPredicateNonExistentException */ @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index c88cb7fe4..28c35a81f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -263,31 +263,16 @@ public void testResetEmptyKnowledgeBase() try (final VLogReasoner reasoner = new VLogReasoner(kb)) { // 1. load and reason reasoner.load(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } reasoner.reason(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } reasoner.resetReasoner(); // 2. load again reasoner.load(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } reasoner.resetReasoner(); // 3. 
load and reason again reasoner.load(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } reasoner.reason(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } reasoner.close(); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java index 52972fd71..c1f4c8580 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java @@ -31,13 +31,14 @@ import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; import karmaresearch.vlog.NotStartedException; import karmaresearch.vlog.VLog; public class ExportQueryResultToCsvFileTest { @Test - public void testExportUnaryPredicateFacts() throws EDBConfigurationException, NotStartedException, IOException { + public void testExportUnaryPredicateFacts() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException, IOException { final String[][] argsAMatrix = { { "c1" }, { "c2" } }; final List> expectedQueryResult = Arrays.asList(Arrays.asList("c1"), Arrays.asList("c2")); final VLog vLog = new VLog(); @@ -50,7 +51,7 @@ public void testExportUnaryPredicateFacts() throws EDBConfigurationException, No } @Test - public void testExportBinaryPredicateFacts() throws EDBConfigurationException, NotStartedException, IOException { + public void testExportBinaryPredicateFacts() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException, IOException { final String[][] argsAMatrix = { { "c1", "c2" }, { "c3", "c4" } }; final List> expectedQueryResult = Arrays.asList(Arrays.asList("c1", "c2"), Arrays.asList("c3", "c4")); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/LargeAritiesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/LargeAritiesTest.java index ded1604f3..3f4a364db 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/LargeAritiesTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/LargeAritiesTest.java @@ -32,6 +32,7 @@ import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; import karmaresearch.vlog.NotStartedException; import karmaresearch.vlog.Rule; import karmaresearch.vlog.Term; @@ -40,7 +41,8 @@ import karmaresearch.vlog.VLog.RuleRewriteStrategy; /** - * Tests that reasoning and querying with predicates of large arities is allowed. + * Tests that reasoning and querying with predicates of large arities is + * allowed. 
* * @author Irina Dragoste * @@ -51,26 +53,27 @@ public class LargeAritiesTest { final static int VARIABLES_PER_RULE_LIMIT = 255; @Test - public void testLargeNumberOfVariablesPerRule() throws NotStartedException, EDBConfigurationException { + public void testLargeNumberOfVariablesPerRule() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT); } @Test(expected = IllegalArgumentException.class) - public void testNumberOfVariablesPerRuleExceedsLimit() throws NotStartedException, EDBConfigurationException { + public void testNumberOfVariablesPerRuleExceedsLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT + 1); } @Test - public void testLargePredicateArities() throws NotStartedException, EDBConfigurationException { + public void testLargePredicateArities() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { testPredicateArity(PREDICATE_ARITY_LIMIT); } @Test(expected = IllegalArgumentException.class) - public void testPredicateAritiesExceedLimit() throws NotStartedException, EDBConfigurationException { + public void testPredicateAritiesExceedLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { testPredicateArity(PREDICATE_ARITY_LIMIT + 1); } - private void testNumberOfVariablesPerRule(int variablesPerRuleLimit) throws EDBConfigurationException, NotStartedException { + private void testNumberOfVariablesPerRule(int variablesPerRuleLimit) + throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { final VLog vLog = new VLog(); final String[][] pFactArguments = { { "c" } }; @@ -100,7 +103,8 @@ private void testNumberOfVariablesPerRule(int variablesPerRuleLimit) throws EDBC vLog.stop(); } - private void testPredicateArity(final int predicateArityLimit) throws EDBConfigurationException, NotStartedException { + private void testPredicateArity(final int predicateArityLimit) + throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { final List constants = new ArrayList<>(); for (int i = 0; i < predicateArityLimit; i++) { constants.add("c" + i); @@ -113,7 +117,8 @@ private void testPredicateArity(final int predicateArityLimit) throws EDBConfigu } final Term[] terms = variables.toArray(new Term[variables.size()]); - final Rule rule = VLogExpressions.makeRule(VLogExpressions.makeAtom("q", terms), VLogExpressions.makeAtom("p", terms)); + final Rule rule = VLogExpressions.makeRule(VLogExpressions.makeAtom("q", terms), + VLogExpressions.makeAtom("p", terms)); final Atom queryAtomQPredicate = VLogExpressions.makeAtom("q", terms); final VLog vLog = new VLog(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/StratifiedNegationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/StratifiedNegationTest.java index 994f07761..2a8e06b37 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/StratifiedNegationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/StratifiedNegationTest.java @@ -33,6 +33,7 @@ import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; import karmaresearch.vlog.MaterializationException; +import karmaresearch.vlog.NonExistingPredicateException; import karmaresearch.vlog.NotStartedException; import karmaresearch.vlog.Rule; import 
karmaresearch.vlog.Term; @@ -52,7 +53,7 @@ public class StratifiedNegationTest { * @throws NotStartedException */ @Test - public void testSimpleInputNegation() throws EDBConfigurationException, NotStartedException { + public void testSimpleInputNegation() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { final Term varX = makeVariable("x"); // P(x), Not(Q(x)) -> R(x) . @@ -111,7 +112,7 @@ public void testSimpleInputNegation() throws EDBConfigurationException, NotStart * @throws NotStartedException */ @Test - public void testStratifiedNegationOnIDB() throws EDBConfigurationException, NotStartedException { + public void testStratifiedNegationOnIDB() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { final Term varX = VLogExpressions.makeVariable("x"); final Atom isP = VLogExpressions.makeAtom("P", varX); @@ -159,7 +160,7 @@ public void testStratifiedNegationOnIDB() throws EDBConfigurationException, NotS * @throws NotStartedException */ @Test(expected = MaterializationException.class) - public void testNegationOnIDBUnstratifiable() throws EDBConfigurationException, NotStartedException { + public void testNegationOnIDBUnstratifiable() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { final Term varX = VLogExpressions.makeVariable("x"); final String predP = "P"; final String predQ = "Q"; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvFileTest.java index 222c34a59..a580baeb4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvFileTest.java @@ -21,7 +21,6 @@ */ import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -34,6 +33,7 @@ import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; import karmaresearch.vlog.NotStartedException; import karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; @@ -51,7 +51,7 @@ public class VLogDataFromCsvFileTest { Arrays.asList(VLogExpressions.makeConstant("c1")), Arrays.asList(VLogExpressions.makeConstant("c2"))); private static List> getUnaryQueryResults(final VLog vLog, final String predicateName) - throws NotStartedException { + throws NotStartedException, NonExistingPredicateException { final TermQueryResultIterator queryResultsPIterator = vLog .query(new Atom(predicateName, VLogExpressions.makeVariable("x"))); final List> queryResults = new ArrayList<>( @@ -60,8 +60,8 @@ private static List> getUnaryQueryResults(final VLog vLog, final Stri } @Test - public void testLoadDataFomCsvString() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { + public void testLoadDataFomCsvString() throws AlreadyStartedException, EDBConfigurationException, IOException, + NotStartedException, NonExistingPredicateException { final String unaryPredicatesEDBConfig = "EDB0_predname=" + unzippedUnaryPredicateName1 + "\n" + "EDB0_type=INMEMORY" + "\n" + "EDB0_param0=" + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB0_param1=" + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + "\n" + 
"EDB1_predname=" @@ -71,7 +71,8 @@ public void testLoadDataFomCsvString() + "\n" + "EDB2_type=INMEMORY" + "\n" + "EDB2_param0=" + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB2_param1=" + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + "\n" + "EDB3_predname=" + zippedUnaryPredicateName2 + "\n" + "EDB3_type=INMEMORY" + "\n" + "EDB3_param0=" - + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB3_param1=" + FileDataSourceTestUtils.zippedUnaryCsvFileRoot; + + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB3_param1=" + + FileDataSourceTestUtils.zippedUnaryCsvFileRoot; final VLog vLog = new VLog(); vLog.start(unaryPredicatesEDBConfig, false); @@ -86,10 +87,30 @@ public void testLoadDataFomCsvString() assertEquals(expectedUnaryQueryResult, queryResult2); assertEquals(queryResult2, queryResultZipped2); - final List> queryResultsEmpty = getUnaryQueryResults(vLog, emptyUnaryPredicateName); - assertTrue(queryResultsEmpty.isEmpty()); - vLog.stop(); } + @Test(expected = NonExistingPredicateException.class) + public void testLoadDataFomCsvStringNonExistingPredicate() throws AlreadyStartedException, + EDBConfigurationException, IOException, NotStartedException, NonExistingPredicateException { + final String unaryPredicatesEDBConfig = "EDB0_predname=" + unzippedUnaryPredicateName1 + "\n" + + "EDB0_type=INMEMORY" + "\n" + "EDB0_param0=" + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + + "EDB0_param1=" + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + "\n" + "EDB1_predname=" + + unzippedUnaryPredicateName2 + "\n" + "EDB1_type=INMEMORY" + "\n" + "EDB1_param0=" + + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB1_param1=" + + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + "\n" + "EDB2_predname=" + zippedUnaryPredicateName1 + + "\n" + "EDB2_type=INMEMORY" + "\n" + "EDB2_param0=" + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + + "EDB2_param1=" + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + "\n" + "EDB3_predname=" + + zippedUnaryPredicateName2 + "\n" + "EDB3_type=INMEMORY" + "\n" + "EDB3_param0=" + + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB3_param1=" + + FileDataSourceTestUtils.zippedUnaryCsvFileRoot; + final VLog vLog = new VLog(); + try { + vLog.start(unaryPredicatesEDBConfig, false); + getUnaryQueryResults(vLog, emptyUnaryPredicateName); + } finally { + vLog.stop(); + } + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java index 02b5411e9..cbe1d005b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java @@ -35,6 +35,7 @@ import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; import karmaresearch.vlog.NotStartedException; import karmaresearch.vlog.Rule; import karmaresearch.vlog.Term; @@ -51,8 +52,8 @@ public class VLogDataFromMemoryTest { @Test - public void testVLogSimpleInference() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { + public void testVLogSimpleInference() throws AlreadyStartedException, EDBConfigurationException, IOException, + NotStartedException, NonExistingPredicateException { final String[][] argsAMatrix = { { "a" }, { "b" } }; final karmaresearch.vlog.Term varX = 
VLogExpressions.makeVariable("x"); @@ -92,10 +93,10 @@ public void testVLogSimpleInference() vLog.stop(); } - + @Test - public void testBooleanQueryTrueIncludeConstantsFalse() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { + public void testBooleanQueryTrueIncludeConstantsFalse() throws AlreadyStartedException, EDBConfigurationException, + IOException, NotStartedException, NonExistingPredicateException { // Creating rules and facts final String[][] argsAMatrix = { { "a", "a" } }; final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); @@ -137,8 +138,8 @@ public void testBooleanQueryTrueIncludeConstantsFalse() } @Test - public void testBooleanQueryTrueIncludeConstantsTrue() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { + public void testBooleanQueryTrueIncludeConstantsTrue() throws AlreadyStartedException, EDBConfigurationException, + IOException, NotStartedException, NonExistingPredicateException { // Creating rules and facts final String[][] argsAMatrix = { { "a", "a" } }; final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); @@ -184,8 +185,8 @@ public void testBooleanQueryTrueIncludeConstantsTrue() } @Test - public void testBooleanQueryFalse() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { + public void testBooleanQueryFalse() throws AlreadyStartedException, EDBConfigurationException, IOException, + NotStartedException, NonExistingPredicateException { final String[][] argsAMatrix = { { "a" } }; final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); @@ -209,50 +210,39 @@ public void testBooleanQueryFalse() vLog.stop(); } - @Test - public void queryEmptyKnowledgeBase() - throws NotStartedException, AlreadyStartedException, EDBConfigurationException, IOException { + @Test(expected = NonExistingPredicateException.class) + public void queryEmptyKnowledgeBaseBeforeReasoning() throws NotStartedException, AlreadyStartedException, + EDBConfigurationException, IOException, NonExistingPredicateException { // Start VLog final VLog vLog = new VLog(); - vLog.start(StringUtils.EMPTY, false); + try { + vLog.start(StringUtils.EMPTY, false); - final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", VLogExpressions.makeVariable("?x")); + final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", + VLogExpressions.makeVariable("?x")); - final TermQueryResultIterator stringQueryResultIterator = vLog.query(queryAtom); - Assert.assertFalse(stringQueryResultIterator.hasNext()); - stringQueryResultIterator.close(); - - vLog.materialize(true); - - final TermQueryResultIterator queryResultIteratorAfterReason = vLog.query(queryAtom); - Assert.assertFalse(queryResultIteratorAfterReason.hasNext()); - queryResultIteratorAfterReason.close(); - - vLog.stop(); + vLog.query(queryAtom); + } finally { + vLog.stop(); + } } - @Test - public void queryEmptyKnowledgeBaseSetRules() - throws NotStartedException, AlreadyStartedException, EDBConfigurationException, IOException { + @Test(expected = NonExistingPredicateException.class) + public void queryEmptyKnowledgeBaseAfterReasoning() throws NotStartedException, AlreadyStartedException, + EDBConfigurationException, IOException, NonExistingPredicateException { // Start VLog final VLog vLog = new VLog(); - vLog.start(StringUtils.EMPTY, false); - - vLog.setRules(new Rule[] 
{}, VLog.RuleRewriteStrategy.NONE); + try { + vLog.start(StringUtils.EMPTY, false); + vLog.materialize(true); - final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", VLogExpressions.makeVariable("?x")); + final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", + VLogExpressions.makeVariable("?x")); - final TermQueryResultIterator stringQueryResultIterator = vLog.query(queryAtom); - Assert.assertFalse(stringQueryResultIterator.hasNext()); - stringQueryResultIterator.close(); - - vLog.materialize(true); - - final TermQueryResultIterator queryResultIteratorAfterReason = vLog.query(queryAtom); - Assert.assertFalse(queryResultIteratorAfterReason.hasNext()); - queryResultIteratorAfterReason.close(); - - vLog.stop(); + vLog.query(queryAtom); + } finally { + vLog.stop(); + } } } \ No newline at end of file diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java index 83f41c031..7eaf1d6ab 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java @@ -1,19 +1,17 @@ package org.semanticweb.vlog4j.core.reasoner.vlog; -import static org.junit.Assert.assertEquals; - /*- * #%L * VLog4j Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2019 VLog4j Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,7 +20,7 @@ * #L% */ -import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; import java.io.IOException; import java.util.ArrayList; @@ -35,6 +33,7 @@ import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; import karmaresearch.vlog.NotStartedException; import karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; @@ -55,7 +54,7 @@ public class VLogDataFromRdfFileTest { VLogExpressions.makeConstant("\"test string\"^^"))); private static List> getTernaryQueryResults(final VLog vLog, final String predicateName) - throws NotStartedException { + throws NotStartedException, NonExistingPredicateException { final TermQueryResultIterator queryResultsPIterator = vLog .query(new Atom(predicateName, VLogExpressions.makeVariable("s"), VLogExpressions.makeVariable("p"), VLogExpressions.makeVariable("o"))); @@ -65,8 +64,8 @@ private static List> getTernaryQueryResults(final VLog vLog, final St } @Test - public void testLoadDataFromRdfString() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { + public void testLoadDataFromRdfString() throws AlreadyStartedException, EDBConfigurationException, IOException, + NotStartedException, NonExistingPredicateException { final String ternaryPredicateEDBConfig = "EDB0_predname=" + unzippedTernaryPredicateName + "\n" + "EDB0_type=INMEMORY" + "\n" + "EDB0_param0=" + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB0_param1=" + FileDataSourceTestUtils.unzippedNtFileRoot + "\n" + "EDB1_predname=" @@ -82,10 +81,26 @@ public void testLoadDataFromRdfString() assertEquals(expectedTernaryQueryResult, queryResult); assertEquals(queryResult, queryResultZipped); - final List> queryResultsEmpty = getTernaryQueryResults(vLog, emptyTernaryPredicateName); - assertTrue(queryResultsEmpty.isEmpty()); - vLog.stop(); } + @Test(expected = NonExistingPredicateException.class) + public void testLoadDataFromRdfStringNonExistingPredicate() throws AlreadyStartedException, + EDBConfigurationException, IOException, NotStartedException, NonExistingPredicateException { + final String ternaryPredicateEDBConfig = "EDB0_predname=" + unzippedTernaryPredicateName + "\n" + + "EDB0_type=INMEMORY" + "\n" + "EDB0_param0=" + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + + "EDB0_param1=" + FileDataSourceTestUtils.unzippedNtFileRoot + "\n" + "EDB1_predname=" + + zippedTernaryPredicateName + "\n" + "EDB1_type=INMEMORY" + "\n" + "EDB1_param0=" + + FileDataSourceTestUtils.INPUT_FOLDER + "\n" + "EDB1_param1=" + + FileDataSourceTestUtils.zippedNtFileRoot; + + final VLog vLog = new VLog(); + try { + vLog.start(ternaryPredicateEDBConfig, false); + getTernaryQueryResults(vLog, emptyTernaryPredicateName); + } finally { + vLog.stop(); + } + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java index d863770a3..801c43e65 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java @@ -28,6 +28,7 @@ import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; import karmaresearch.vlog.NotStartedException; import 
karmaresearch.vlog.Rule; import karmaresearch.vlog.Term; @@ -48,7 +49,7 @@ public class VLogQueryTest { private final Atom queryAtomQPredicate = VLogExpressions.makeAtom("q", variableZ); @Test - public void queryResultWithBlanksExcludeBlanks() throws EDBConfigurationException, NotStartedException { + public void queryResultWithBlanksExcludeBlanks() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { final VLog vLog = new VLog(); vLog.addData("p", pFactArguments); @@ -61,7 +62,7 @@ public void queryResultWithBlanksExcludeBlanks() throws EDBConfigurationExceptio } @Test - public void queryResultWithBlanksInludeBlanks() throws EDBConfigurationException, NotStartedException { + public void queryResultWithBlanksInludeBlanks() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { final VLog vLog = new VLog(); vLog.addData("p", pFactArguments); vLog.setRules(new Rule[] { ruleWithExistentials }, RuleRewriteStrategy.NONE); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java index c9d3b9f12..0124f6eb6 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java @@ -34,6 +34,7 @@ import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; import karmaresearch.vlog.NotStartedException; import karmaresearch.vlog.Rule; import karmaresearch.vlog.Term; @@ -44,7 +45,7 @@ public class VLogTermNamesTest { @Test - public void testTermCase() throws EDBConfigurationException, NotStartedException { + public void testTermCase() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { final String[][] argsAMatrix = { { "A" }, { "a" } }; final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("x"); final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("b", varX); @@ -70,11 +71,6 @@ public void testTermCase() throws EDBConfigurationException, NotStartedException assertFalse(queryResultIteratorBx1.hasNext()); queryResultIteratorBx1.close(); - // Querying x(?X) before materialize - final TermQueryResultIterator queryResultIteratorXx = vLog.query(new karmaresearch.vlog.Atom("x", varX)); - assertFalse(queryResultIteratorXx.hasNext()); - queryResultIteratorBx1.close(); - vLog.materialize(true); // Querying b(?X) after materialize @@ -91,7 +87,7 @@ public void testTermCase() throws EDBConfigurationException, NotStartedException @Test public void testSupportedConstantNames() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { + throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException, NonExistingPredicateException { final String constantNameNumber = "1"; final String constantNameStartsWithNumber = "12_13_14"; final String[][] argsAMatrix = { { constantNameNumber }, { constantNameStartsWithNumber } }; From a5c77df61d1bec549e7caa4a4b5ea45683aa6074 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 21 Aug 2019 17:28:29 +0200 Subject: [PATCH 0146/1003] No tail recursion (insufficient here) --- .../semanticweb/vlog4j/examples/SimpleReasoningExample.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index ca9df94b8..b3750e0bf 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -58,7 +58,7 @@ public static void main(final String[] args) throws IOException { + "zipLocation(\"01069\", dresden) . \n" // + "% --- Standard recursion: locations are transitive --- \n" // + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // - + "locatedIn(?X,?Z) :- location(?X,?Y), locatedIn(?Y,?Z) . \n" // + + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . \n" // + "% --- Build address facts using the city constant --- \n" // + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP, ?City) . \n" + "% --- Value invention: universities have some address --- \n" // @@ -78,8 +78,7 @@ public static void main(final String[] args) throws IOException { } try (final Reasoner reasoner = new VLogReasoner(kb)) { - - System.out.print("Loading rules and facts ... "); + System.out.print("Loading knowledge base ... "); reasoner.load(); System.out.println("done."); From 32861715bbe7391917250fde5bf97475709710d2 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 21 Aug 2019 17:31:20 +0200 Subject: [PATCH 0147/1003] test URIs starting with _: --- .../vlog4j/syntax/parser/RuleParserTest.java | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index c78808e7f..e66be4217 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -435,4 +435,25 @@ public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURL ruleParser.parse(input); } + @Test(expected = ParsingException.class) + public void testBlankPrefixDeclaration() throws ParsingException { + String input = "@prefix _: . 
s(c) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void testBlankNodeTerm() throws ParsingException { + String input = "(_:blank) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void testBlankPredicateName() throws ParsingException { + String input = "_:(a) ."; + RuleParser ruleParser = new RuleParser(); + ruleParser.parse(input); + } + } From 124a6b7fa924e01889d29b0addd1ce3808ed5cdb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 21 Aug 2019 17:37:30 +0200 Subject: [PATCH 0148/1003] fixed counting data sources --- .../core/reasoner/implementation/VLogReasoner.java | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 4e09af18d..b282ac147 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -313,10 +313,6 @@ public void load() for (final Statement statement : knowledgeBase) { statement.accept(visitor); } -// System.out.println("\nEDB: " + edbPredicates); -// System.out.println("\nIDB: " + idbPredicates); -// System.out.println("\nEDB with aliases: " + aliasedEdbPredicates); -// System.out.println("\nAliases for EDBs: " + aliasesForEdbPredicates); if (edbPredicates.isEmpty() && aliasedEdbPredicates.isEmpty()) { LOGGER.warn("No facts have been provided."); @@ -349,16 +345,16 @@ String getDataSourceConfigurationString() { if (dataSourceDeclaration.getDataSource() != null) { formatter.format(dataSourceDeclaration.getDataSource().toConfigString(), dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); + dataSourceIndex++; } - dataSourceIndex++; } for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); if (dataSourceDeclaration.getDataSource() != null) { formatter.format(dataSourceDeclaration.getDataSource().toConfigString(), dataSourceIndex, ModelToVLogConverter.toVLogPredicate(aliasPredicate)); + dataSourceIndex++; } - dataSourceIndex++; } formatter.close(); return configStringBuilder.toString(); @@ -489,8 +485,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla @Override public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, - final boolean includeBlanks) - throws ReasonerStateException, IOException { + final boolean includeBlanks) throws ReasonerStateException, IOException { final boolean filterBlanks = !includeBlanks; if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); From 793f0219d36534c163e7570e9800f1941948de96 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 21 Aug 2019 17:43:01 +0200 Subject: [PATCH 0149/1003] small reordering --- .../reasoner/implementation/VLogReasoner.java | 59 +++++++++---------- 1 file changed, 29 insertions(+), 30 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b282ac147..1a3755d7c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -411,6 +411,34 @@ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource } } + void loadFacts() { + for (final Predicate predicate : directEdbFacts.keySet()) { + Predicate aliasPredicate; + if (edbPredicates.containsKey(predicate)) { + aliasPredicate = predicate; + } else { + aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + } + try { + this.vLog.addData(ModelToVLogConverter.toVLogPredicate(aliasPredicate), + ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate))); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration.", e); + } + } + } + + void loadRules() { + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); + final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter + .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); + try { + this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } + } + @Override public boolean reason() throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { @@ -516,8 +544,7 @@ public void resetReasoner() throws ReasonerStateException { } this.reasonerState = ReasonerState.BEFORE_LOADING; this.vLog.stop(); - LOGGER.warn( - "Reasoner has been reset. All inferences computed during reasoning have been discarded. More data and rules can be added after resetting. The reasoner needs to be loaded again to perform querying and reasoning."); + LOGGER.info("Reasoner has been reset. 
All inferences computed during reasoning have been discarded."); } @Override @@ -527,34 +554,6 @@ public void close() { this.vLog.stop(); } - void loadFacts() { - for (final Predicate predicate : directEdbFacts.keySet()) { - Predicate aliasPredicate; - if (edbPredicates.containsKey(predicate)) { - aliasPredicate = predicate; - } else { - aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); - } - try { - this.vLog.addData(ModelToVLogConverter.toVLogPredicate(aliasPredicate), - ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate))); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); - } - } - } - - void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); - final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter - .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); - try { - this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } - } - @Override public void setLogLevel(LogLevel logLevel) throws ReasonerStateException { if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { From be2d05afadc734ef2ab75dc52bcde183cadcb33f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 22 Aug 2019 00:05:48 +0200 Subject: [PATCH 0150/1003] More and fixed tests --- .../reasoner/implementation/VLogReasoner.java | 47 ++--- .../src/test/data/input/unaryFactsCD.csv | 2 + .../LoadDataFromMemoryTest.java | 85 --------- ...sonerTest.java => VLogReasonerBasics.java} | 2 +- .../VLogReasonerCombinedInputs.java | 167 ++++++++++++++++++ ...ileTest.java => VLogReasonerCsvInput.java} | 2 +- ...leTest.java => VLogReasonerCsvOutput.java} | 2 +- ...ionTest.java => VLogReasonerNegation.java} | 27 +-- ...ileTest.java => VLogReasonerRdfInput.java} | 2 +- ...Test.java => VLogReasonerSparqlInput.java} | 2 +- 10 files changed, 193 insertions(+), 145 deletions(-) create mode 100644 vlog4j-core/src/test/data/input/unaryFactsCD.csv delete mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/{ReasonerTest.java => VLogReasonerBasics.java} (96%) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/{LoadDataFromCsvFileTest.java => VLogReasonerCsvInput.java} (99%) rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/{ExportQueryAnswersToCsvFileTest.java => VLogReasonerCsvOutput.java} (99%) rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/{StratifiedNegationTest.java => VLogReasonerNegation.java} (85%) rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/{LoadDataFromRdfFileTest.java => VLogReasonerRdfInput.java} (99%) rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/{LoadDataFromSparqlQueryTest.java => VLogReasonerSparqlInput.java} (99%) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 
1a3755d7c..3f0c57285 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -199,9 +199,9 @@ void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { final Predicate predicate = dataSourceDeclaration.getPredicate(); Predicate aliasPredicate; if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { - aliasPredicate = new PredicateImpl(predicate.getName() + "_FACT", predicate.getArity()); + aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); } else { - aliasPredicate = new PredicateImpl(predicate.getName() + "_" + predicate.hashCode(), + aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), predicate.getArity()); } aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); @@ -302,8 +302,7 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { } @Override - public void load() - throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { + public void load() throws IOException, IncompatiblePredicateArityException, ReasonerStateException { if (this.reasonerState == ReasonerState.AFTER_CLOSING) { throw new ReasonerStateException(this.reasonerState, "Loading is not allowed after closing."); } @@ -360,30 +359,6 @@ String getDataSourceConfigurationString() { return configStringBuilder.toString(); } -// String generateDataSourcesConfig() { -// final StringBuilder configStringBuilder = new StringBuilder(); -// int dataSourceIndex = 0; -// for (final Predicate predicate : this.knowledgeBase.getDataSourceForPredicate().keySet()) { -// final DataSource dataSource = this.knowledgeBase.getDataSourceForPredicate().get(predicate); -// try (final Formatter formatter = new Formatter(configStringBuilder)) { -// formatter.format(dataSource.toConfigString(), dataSourceIndex, -// ModelToVLogConverter.toVLogPredicate(predicate)); -// } -// dataSourceIndex++; -// } -// return configStringBuilder.toString(); -// } - -// private void validateEdbIdbSeparation() throws EdbIdbSeparationException { -// final Set edbPredicates = this.knowledgeBase.getEdbPredicates(); -// final Set idbPredicates = this.knowledgeBase.getIdbPredicates(); -// final Set intersection = new HashSet<>(edbPredicates); -// intersection.retainAll(idbPredicates); -// if (!intersection.isEmpty()) { -// throw new EdbIdbSeparationException(intersection); -// } -// } - void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { for (final Predicate predicate : edbPredicates.keySet()) { validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); @@ -420,8 +395,15 @@ void loadFacts() { aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); } try { - this.vLog.addData(ModelToVLogConverter.toVLogPredicate(aliasPredicate), - ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate))); + String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); + String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate)); + this.vLog.addData(vLogPredicateName, vLogPredicateTuples); + if (LOGGER.isDebugEnabled()) { + for (String[] tuple : vLogPredicateTuples) { + LOGGER.debug( + "Loaded direct fact " + vLogPredicateName + "(" + Arrays.deepToString(tuple) + ")"); + } + } } catch (final 
EDBConfigurationException e) { throw new RuntimeException("Invalid data sources configuration.", e); } @@ -434,6 +416,11 @@ void loadRules() { .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); try { this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); + if (LOGGER.isDebugEnabled()) { + for (karmaresearch.vlog.Rule rule : vLogRuleArray) { + LOGGER.debug("Loaded rule " + rule.toString()); + } + } } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } diff --git a/vlog4j-core/src/test/data/input/unaryFactsCD.csv b/vlog4j-core/src/test/data/input/unaryFactsCD.csv new file mode 100644 index 000000000..18ebd8521 --- /dev/null +++ b/vlog4j-core/src/test/data/input/unaryFactsCD.csv @@ -0,0 +1,2 @@ +c +d diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java deleted file mode 100644 index 75af663b4..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ /dev/null @@ -1,85 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.IOException; -import java.util.Arrays; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; - -import karmaresearch.vlog.EDBConfigurationException; - -public class LoadDataFromMemoryTest { - - @Test(expected = EdbIdbSeparationException.class) - public void loadEdbIdbNotSeparated() throws EDBConfigurationException, IOException, EdbIdbSeparationException, - ReasonerStateException, IncompatiblePredicateArityException { - final Variable vx = Expressions.makeVariable("x"); - final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), - Expressions.makePositiveLiteral("p", vx)); - - final Fact factIDBpredQ1 = Expressions.makeFact("q", Arrays.asList(Expressions.makeConstant("c"))); - final Fact factEDBpredQ2 = Expressions.makeFact("q", - Arrays.asList(Expressions.makeConstant("d"), Expressions.makeConstant("d"))); - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(rule, factIDBpredQ1, factEDBpredQ2); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); - } - } - - @Test - public void loadEdbIdbSeparated() throws EDBConfigurationException, IOException, EdbIdbSeparationException, - ReasonerStateException, IncompatiblePredicateArityException { - final Variable vx = Expressions.makeVariable("x"); - final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), - Expressions.makePositiveLiteral("p", vx)); - - final Fact factEDBpred = Expressions.makeFact("q", - Arrays.asList(Expressions.makeConstant("d"), Expressions.makeConstant("d"))); - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(rule, factEDBpred); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); - } - } - - // TODO move to a test class for KnowledgeBase - @Test(expected = IllegalArgumentException.class) - public void addFactsWithVariableTerms() throws ReasonerStateException { - - final Fact factWithVariableTerms = Expressions.makeFact("q", - Arrays.asList(Expressions.makeConstant("d"), Expressions.makeVariable("x"))); - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(factWithVariableTerms); - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java similarity index 96% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java index ff3a60ae6..fb0b13a35 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java @@ -44,7 +44,7 @@ import karmaresearch.vlog.EDBConfigurationException; -public class ReasonerTest { +public class VLogReasonerBasics { final String constantNameC = "c"; final String constantNameD = "d"; 
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java new file mode 100644 index 000000000..18d1b81f6 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -0,0 +1,167 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import static org.junit.Assert.assertEquals; + +import java.io.File; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Set; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; + +public class VLogReasonerCombinedInputs { + + final Variable vx = Expressions.makeVariable("x"); + final Predicate q = Expressions.makePredicate("q", 1); + final Rule rulePimpliesQ = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), + Expressions.makePositiveLiteral("p", vx)); + + final Fact factQc = Expressions.makeFact(q, Arrays.asList(Expressions.makeConstant("c"))); + final Fact factQc1 = Expressions.makeFact(q, Arrays.asList(Expressions.makeConstant("c1"))); + final Fact factQc2 = Expressions.makeFact(q, Arrays.asList(Expressions.makeConstant("c2"))); + final Fact factQd = Expressions.makeFact(q, Arrays.asList(Expressions.makeConstant("d"))); + final Fact factPd = Expressions.makeFact("p", Arrays.asList(Expressions.makeConstant("d"))); + final PositiveLiteral queryQx = Expressions.makePositiveLiteral(q, Arrays.asList(Expressions.makeVariable("x"))); + + final Set> resultsCC1C2D = Set.of(Collections.singletonList(Expressions.makeConstant("c")), + Collections.singletonList(Expressions.makeConstant("c1")), + Collections.singletonList(Expressions.makeConstant("c2")), + Collections.singletonList(Expressions.makeConstant("d"))); + + final DataSourceDeclaration qFromCsv; + final DataSourceDeclaration qCDFromCsv; + + public VLogReasonerCombinedInputs() throws IOException { + 
qFromCsv = new DataSourceDeclarationImpl(q, new CsvFileDataSource(new File( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv"))); + qCDFromCsv = new DataSourceDeclarationImpl(q, + new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"))); + } + + @Test + public void samePredicateSourceFactRule() + throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(qFromCsv, factQc, factPd, rulePimpliesQ); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true); + final Set> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + assertEquals(resultsCC1C2D, queryResult); + } + } + + @Test + public void samePredicateFactSourceRule() + throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQc, factPd, qFromCsv, rulePimpliesQ); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true); + final Set> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + assertEquals(resultsCC1C2D, queryResult); + } + } + + @Test + public void samePredicateRuleFactSource() + throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(rulePimpliesQ, factQc, factPd, qFromCsv); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true); + final Set> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + assertEquals(resultsCC1C2D, queryResult); + } + } + + @Test + public void samePredicateSourceSource() + throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(qFromCsv, qCDFromCsv); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true); + final Set> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + assertEquals(resultsCC1C2D, queryResult); + } + } + + @Test + public void samePredicateSourceFactFact() + throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(qFromCsv, factQc, factQd); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true); + final Set> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + assertEquals(resultsCC1C2D, queryResult); + } + } + + @Test + public void samePredicateFactsRule() + throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPd, factQc, 
factQc1, factQc2, rulePimpliesQ); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true); + final Set> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + assertEquals(resultsCC1C2D, queryResult); + } + } + +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java similarity index 99% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java index b96d868a2..36555fda1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -46,7 +46,7 @@ import karmaresearch.vlog.EDBConfigurationException; -public class LoadDataFromCsvFileTest { +public class VLogReasonerCsvInput { private static final Predicate unaryPredicate1 = Expressions.makePredicate("p", 1); private static final Predicate unaryPredicate2 = Expressions.makePredicate("q", 1); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java similarity index 99% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java index 8c63a0f8e..e116aefda 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java @@ -38,7 +38,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -public class ExportQueryAnswersToCsvFileTest { +public class VLogReasonerCsvOutput { @Test public void testEDBQuerySameConstantSubstitutesSameVariableName() diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java similarity index 85% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java index 88809c236..ebd8be56c 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/StratifiedNegationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java @@ -46,29 +46,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -public class StratifiedNegationTest { - - @Test(expected = EdbIdbSeparationException.class) - public void testNotStratifiableEdbIdbSeparation() - throws EdbIdbSeparationException, 
IncompatiblePredicateArityException, ReasonerStateException, IOException { - - final Variable x = makeVariable("x"); - final Variable y = makeVariable("y"); - - final Literal pXY = makePositiveLiteral("P", x, y); - final Literal notQXY = makeNegativeLiteral("Q", x, y); - final PositiveLiteral qXY = makePositiveLiteral("Q", x, y); - - final Rule rule = makeRule(qXY, pXY, notQXY); - final Fact fact = makeFact("Q", Arrays.asList(makeConstant("c"), makeConstant("d"))); - - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(rule, fact); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); - } - } +public class VLogReasonerNegation { @Test(expected = RuntimeException.class) public void testNotStratifiable() @@ -82,10 +60,9 @@ public void testNotStratifiable() final PositiveLiteral qXY = makePositiveLiteral("Q", x, y); final Rule rule = makeRule(qXY, pXY, notQXY); - final Fact fact = makeFact("P", Arrays.asList(makeConstant("c"), makeConstant("d"))); final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(rule, fact); + kb.addStatement(rule); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java similarity index 99% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java index c26a6ab22..264237ebb 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromRdfFileTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -47,7 +47,7 @@ import karmaresearch.vlog.EDBConfigurationException; -public class LoadDataFromRdfFileTest { +public class VLogReasonerRdfInput { private static final Predicate ternaryPredicate = Expressions.makePredicate("triple", 3); private static final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(ternaryPredicate, diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java similarity index 99% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java index 0b1607042..b5a03c7e8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -40,7 +40,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -public class LoadDataFromSparqlQueryTest { +public class VLogReasonerSparqlInput { /** * Tests the query "SELECT ?b ?a WHERE {?a p:P22 ?b}" From 6cf133f17bee6ac70875f5d960588d1a2292f654 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 22 Aug 2019 00:22:54 +0200 Subject: [PATCH 0151/1003] add TODO to class documentation --- .../core/reasoner/implementation/VLogReasoner.java | 13 
+++++++++++++ 1 file changed, 13 insertions(+) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 3f0c57285..45301cacd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -71,6 +71,19 @@ * #L% */ +/** + * Reasoner implementation using the VLog backend. + * + * @TODO Due to automatic predicate renaming, it can happen that an EDB + * predicate cannot be queried after loading unless reasoning has already + * been invoked (since the auxiliary rule that imports the EDB facts to + * the "real" predicate must be used). This issue could be weakened by + * rewriting queries to (single-source) EDB predicates internally when in + * such a state, + * + * @author Markus Kroetzsch + * + */ public class VLogReasoner implements Reasoner { private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); From 7a7955f3e61208e18de19148a0afed3e4c8d3e6e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 22 Aug 2019 00:25:54 +0200 Subject: [PATCH 0152/1003] Remove EDB/IDB separation --- .../examples/core/AddDataFromCsvFile.java | 39 ++++++++----------- .../examples/core/AddDataFromRdfFile.java | 28 +++++++------ .../SkolemVsRestrictedChaseTermination.java | 27 ++++++------- 3 files changed, 40 insertions(+), 54 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 34385f56d..371fcc530 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -60,21 +60,16 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I ExamplesUtils.configureLogging(); final String rules = "" // first declare file inputs: - + "@source bicycleEDB(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz\") ." - + "@source hasPartEDB(2) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ." - + "@source wheelEDB(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz\") ." - // rules to load all data from the file-based ("EDB") predicates: - + "bicycleIDB(?X) :- bicycleEDB(?X) ." // - + "wheelIDB(?X) :- wheelEDB(?X) ." // - + "hasPartIDB(?X, ?Y) :- hasPartEDB(?X, ?Y) ." // - + "isPartOfIDB(?X, ?Y) :- isPartOfEDB(?X, ?Y) ." + + "@source bicycle(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz\") ." + + "@source hasPart(2) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ." + + "@source wheel(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz\") ." // every bicycle has some part that is a wheel: - + "hasPartIDB(?X, !Y), wheelIDB(!Y) :- bicycleIDB(?X) ." + + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." // every wheel is part of some bicycle: - + "isPartOfIDB(?X, !Y), bicycleIDB(!Y) :- wheelIDB(?X) ." + + "isPartOf(?X, !Y), bicycle(!Y) :- wheel(?X) ." // hasPart and isPartOf are mutually inverse relations: - + "hasPartIDB(?X, ?Y) :- isPartOfIDB(?Y, ?X) ." // - + "isPartOfIDB(?X, ?Y) :- hasPartIDB(?Y, ?X) ."; + + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." 
// + + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; final KnowledgeBase kb = RuleParser.parse(rules); @@ -86,23 +81,21 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I reasoner.load(); System.out.println("Before materialisation:"); - ExamplesUtils.printOutQueryAnswers("hasPartEDB(?X, ?Y)", reasoner); + ExamplesUtils.printOutQueryAnswers("hasPart(?X, ?Y)", reasoner); /* The reasoner will use the Restricted Chase by default. */ reasoner.reason(); System.out.println("After materialisation:"); - final PositiveLiteral hasPartIdbXY = RuleParser.parsePositiveLiteral("hasPartIDB(?X, ?Y)"); - ExamplesUtils.printOutQueryAnswers(hasPartIdbXY, reasoner); + final PositiveLiteral hasPartXY = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); + ExamplesUtils.printOutQueryAnswers(hasPartXY, reasoner); /* Exporting query answers to {@code .csv} files. */ - reasoner.exportQueryAnswersToCsv(hasPartIdbXY, ExamplesUtils.OUTPUT_FOLDER + "hasPartIDBXYWithBlanks.csv", - true); - reasoner.exportQueryAnswersToCsv(hasPartIdbXY, - ExamplesUtils.OUTPUT_FOLDER + "hasPartIDBXYWithoutBlanks.csv", false); - - final PositiveLiteral hasPartIDBRedBikeY = RuleParser.parsePositiveLiteral("hasPartIDB(redBike, ?Y)"); - reasoner.exportQueryAnswersToCsv(hasPartIDBRedBikeY, - ExamplesUtils.OUTPUT_FOLDER + "hasPartIDBRedBikeYWithBlanks.csv", true); + reasoner.exportQueryAnswersToCsv(hasPartXY, ExamplesUtils.OUTPUT_FOLDER + "hasPartWithBlanks.csv", true); + reasoner.exportQueryAnswersToCsv(hasPartXY, ExamplesUtils.OUTPUT_FOLDER + "hasPartWithoutBlanks.csv", + false); + final PositiveLiteral hasPartRedBikeY = RuleParser.parsePositiveLiteral("hasPart(redBike, ?Y)"); + reasoner.exportQueryAnswersToCsv(hasPartRedBikeY, + ExamplesUtils.OUTPUT_FOLDER + "hasPartRedBikeWithBlanks.csv", true); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index a04ec5d69..48fda4f86 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -71,16 +71,14 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I + "@prefix ex: ." + "@prefix rdf: ." // specify data sources: - + "@source triplesEDB(3) : load-rdf(\"" + ExamplesUtils.INPUT_FOLDER + "ternaryBicycleEDB.nt.gz\") ." - // rule for loading all triples from file: - + "triplesIDB(?S, ?P, ?O) :- triplesEDB(?S, ?P, ?O) ." + + "@source triple(3) : load-rdf(\"" + ExamplesUtils.INPUT_FOLDER + "ternaryBicycleEDB.nt.gz\") ." // every bicycle has some part that is a wheel: - + "triplesIDB(?S, ex:hasPart, !X), triplesIDB(!X, rdf:type, ex:wheel) :- triplesIDB(?S, rdf:type, ex:bicycle) ." + + "triple(?S, ex:hasPart, !X), triple(!X, rdf:type, ex:wheel) :- triple(?S, rdf:type, ex:bicycle) ." // every wheel is part of some bicycle: - + "triplesIDB(?S, ex:isPartOf, !X) :- triplesIDB(?S, rdf:type, ex:wheel) ." + + "triple(?S, ex:isPartOf, !X) :- triple(?S, rdf:type, ex:wheel) ." // hasPart and isPartOf are mutually inverse relations: - + "triplesIDB(?S, ex:isPartOf, ?O) :- triplesIDB(?O, ex:hasPart, ?S) ." - + "triplesIDB(?S, ex:hasPart, ?O) :- triplesIDB(?O, ex:isPartOf, ?S) ."; + + "triple(?S, ex:isPartOf, ?O) :- triple(?O, ex:hasPart, ?S) ." 
+ + "triple(?S, ex:hasPart, ?O) :- triple(?O, ex:isPartOf, ?S) ."; final KnowledgeBase kb = RuleParser.parse(rules); @@ -93,25 +91,25 @@ public static void main(final String[] args) throws EdbIdbSeparationException, I System.out.println("Before materialisation:"); - ExamplesUtils.printOutQueryAnswers("triplesEDB(?X, , ?Y)", reasoner); + ExamplesUtils.printOutQueryAnswers("triple(?X, , ?Y)", reasoner); /* The reasoner will use the Restricted Chase by default. */ reasoner.reason(); System.out.println("After materialisation:"); final PositiveLiteral hasPartIDB = RuleParser - .parsePositiveLiteral("triplesIDB(?X, , ?Y)"); + .parsePositiveLiteral("triple(?X, , ?Y)"); ExamplesUtils.printOutQueryAnswers(hasPartIDB, reasoner); /* Exporting query answers to {@code .csv} files. */ + reasoner.exportQueryAnswersToCsv(hasPartIDB, ExamplesUtils.OUTPUT_FOLDER + "ternaryHasPartWithBlanks.csv", + true); reasoner.exportQueryAnswersToCsv(hasPartIDB, - ExamplesUtils.OUTPUT_FOLDER + "ternaryHasPartIDBWithBlanks.csv", true); - reasoner.exportQueryAnswersToCsv(hasPartIDB, - ExamplesUtils.OUTPUT_FOLDER + "ternaryHasPartIDBWithoutBlanks.csv", false); + ExamplesUtils.OUTPUT_FOLDER + "ternaryHasPartWithoutBlanks.csv", false); - final PositiveLiteral existsHasPartRedBike = RuleParser.parsePositiveLiteral( - "triplesIDB(, , ?X)"); + final PositiveLiteral existsHasPartRedBike = RuleParser + .parsePositiveLiteral("triple(, , ?X)"); reasoner.exportQueryAnswersToCsv(existsHasPartRedBike, - ExamplesUtils.OUTPUT_FOLDER + "existsHasPartIDBRedBikeWithBlanks.csv", true); + ExamplesUtils.OUTPUT_FOLDER + "existsHasPartRedBikeWithBlanks.csv", true); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 809362158..d07e4d6c5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -53,22 +53,17 @@ public static void main(final String[] args) throws ReasonerStateException, EdbI /* 1. Load data and prepare rules. */ final String rules = "" // define some facts: - + "bicycleEDB(bicycle1) ." // - + "hasPartEDB(bicycle1, wheel1) ." // - + "wheelEDB(wheel1) ." // - + "bicycleEDB(bicycle2) ." // - // rules to load all data from the file-based ("EDB") predicates: - + "bicycleIDB(?X) :- bicycleEDB(?X) ." // - + "wheelIDB(?X) :- wheelEDB(?X) ." // - + "hasPartIDB(?X, ?Y) :- hasPartEDB(?X, ?Y) ." // - + "isPartOfIDB(?X, ?Y) :- isPartOfEDB(?X, ?Y) ." // + + "bicycle(bicycle1) ." // + + "hasPart(bicycle1, wheel1) ." // + + "wheel(wheel1) ." // + + "bicycle(bicycle2) ." // // every bicycle has some part that is a wheel: - + "hasPartIDB(?X, !Y), wheelIDB(!Y) :- bicycleIDB(?X) ." // + + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." // // every wheel is part of some bicycle: - + "isPartOfIDB(?X, !Y), bicycleIDB(!Y) :- wheelIDB(?X) ." // + + "isPartOf(?X, !Y), bicycle(!Y) :- wheel(?X) ." // // hasPart and isPartOf are mutually inverse relations: - + "hasPartIDB(?X, ?Y) :- isPartOfIDB(?Y, ?X) ." // - + "isPartOfIDB(?X, ?Y) :- hasPartIDB(?Y, ?X) ."; + + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." 
// + + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; final KnowledgeBase kb = RuleParser.parse(rules); @@ -79,7 +74,7 @@ public static void main(final String[] args) throws ReasonerStateException, EdbI try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - final PositiveLiteral queryHasPart = RuleParser.parsePositiveLiteral("hasPartIDB(?X, ?Y)"); + final PositiveLiteral queryHasPart = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); /* See that there is no fact HasPartIDB before reasoning. */ System.out.println("Before reasoning is started, no inferrences have been computed yet."); @@ -104,7 +99,7 @@ public static void main(final String[] args) throws ReasonerStateException, EdbI */ final QueryResultIterator answers = reasoner.answerQuery(queryHasPart, true); System.out.println("Before the timeout, the Skolem chase had produced " - + ExamplesUtils.iteratorSize(answers) + " results for hasPartIDB(?X, ?Y)."); + + ExamplesUtils.iteratorSize(answers) + " results for hasPart(?X, ?Y)."); /* * We reset the reasoner and apply the Restricted Chase on the same set of rules @@ -118,7 +113,7 @@ public static void main(final String[] args) throws ReasonerStateException, EdbI * See that there is no fact HasPartIDB before reasoning. All inferred facts * have been discarded when the reasoner was reset. */ - System.out.println("We can verify that there are no inferences for hasPartIDB(?X, ?Y) after reset."); + System.out.println("We can verify that there are no inferences for hasPart(?X, ?Y) after reset."); ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); /* From 9a0e6bea53bdde8b4200fa6b865388c13e8e7203 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 22 Aug 2019 08:58:32 +0200 Subject: [PATCH 0153/1003] Unified checks for closed state --- .../reasoner/implementation/VLogReasoner.java | 63 ++++++++++--------- .../implementation/ReasonerStateTest.java | 11 ---- 2 files changed, 33 insertions(+), 41 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 45301cacd..ccde18e68 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -271,10 +271,8 @@ public KnowledgeBase getKnowledgeBase() { @Override public void setAlgorithm(final Algorithm algorithm) { Validate.notNull(algorithm, "Algorithm cannot be null!"); + warnClosed(); this.algorithm = algorithm; - if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { - LOGGER.warn("Setting algorithm on a closed reasoner."); - } } @Override @@ -284,13 +282,11 @@ public Algorithm getAlgorithm() { @Override public void setReasoningTimeout(Integer seconds) { + warnClosed(); if (seconds != null) { Validate.isTrue(seconds > 0, "Only strictly positive timeout period alowed!", seconds); } this.timeoutAfterSeconds = seconds; - if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { - LOGGER.warn("Setting timeout on a closed reasoner."); - } } @Override @@ -299,14 +295,10 @@ public Integer getReasoningTimeout() { } @Override - public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) throws ReasonerStateException { + public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { + warnClosed(); Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); - if 
(this.reasonerState != ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, - "Rules cannot be re-writen after the reasoner has been loaded! Call reset() to undo loading and reasoning."); - } this.ruleRewriteStrategy = ruleRewritingStrategy; - LOGGER.warn("Setting rule rewrite strategy on a closed reasoner."); } @Override @@ -316,9 +308,7 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { @Override public void load() throws IOException, IncompatiblePredicateArityException, ReasonerStateException { - if (this.reasonerState == ReasonerState.AFTER_CLOSING) { - throw new ReasonerStateException(this.reasonerState, "Loading is not allowed after closing."); - } + validateNotClosed(); final LoadKbVisitor visitor = new LoadKbVisitor(); visitor.clearIndexes(); @@ -488,14 +478,13 @@ private void runChase() { @Override public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) throws ReasonerStateException { - final boolean filterBlanks = !includeBlanks; + validateNotClosed(); if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } else if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { - throw new ReasonerStateException(this.reasonerState, "Querying is not allowed after closing."); } Validate.notNull(query, "Query atom must not be null!"); + final boolean filterBlanks = !includeBlanks; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); TermQueryResultIterator stringQueryResultIterator; @@ -514,16 +503,15 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla @Override public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws ReasonerStateException, IOException { - final boolean filterBlanks = !includeBlanks; + validateNotClosed(); if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } else if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { - throw new ReasonerStateException(this.reasonerState, "Querying is not allowed after closing."); } Validate.notNull(query, "Query atom must not be null!"); Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); + final boolean filterBlanks = !includeBlanks; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); try { this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); @@ -538,10 +526,8 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, @Override public void resetReasoner() throws ReasonerStateException { + validateNotClosed(); // TODO what should happen to the KB? - if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { - throw new ReasonerStateException(this.reasonerState, "Resetting is not allowed after closing."); - } this.reasonerState = ReasonerState.BEFORE_LOADING; this.vLog.stop(); LOGGER.info("Reasoner has been reset. 
All inferences computed during reasoning have been discarded."); @@ -556,9 +542,7 @@ public void close() { @Override public void setLogLevel(LogLevel logLevel) throws ReasonerStateException { - if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { - throw new ReasonerStateException(this.reasonerState, "Setting log level is not allowed after closing."); - } + validateNotClosed(); Validate.notNull(logLevel, "Log level cannot be null!"); this.internalLogLevel = logLevel; this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); @@ -571,9 +555,7 @@ public LogLevel getLogLevel() { @Override public void setLogFile(String filePath) throws ReasonerStateException { - if (this.reasonerState.equals(ReasonerState.AFTER_CLOSING)) { - throw new ReasonerStateException(this.reasonerState, "Setting log file is not allowed after closing."); - } + validateNotClosed(); this.vLog.setLogFile(filePath); } @@ -690,4 +672,25 @@ private void updateReasonerToKnowledgeBaseChanged() { // } // } + /** + * Check if reasoner is closed and throw an exception if it is. + * + * @throws ReasonerStateException + */ + void validateNotClosed() throws ReasonerStateException { + if (this.reasonerState == ReasonerState.AFTER_CLOSING) { + LOGGER.error("Invalid operation requested on a closed reasoner object."); + throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner."); + } + } + + /** + * Check if reasoner is closed and log a warning if it is. + */ + void warnClosed() { + if (this.reasonerState == ReasonerState.AFTER_CLOSING) { + LOGGER.warn("Meaningless operation performed on a closed reasoner object."); + } + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 28c35a81f..8a0764a37 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -153,21 +153,10 @@ public void setRuleRewriteStrategy1() throws ReasonerStateException { } } - @Test(expected = ReasonerStateException.class) - public void setRuleRewriteStrategy2() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.load(); - reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); - } - } - @Test public void setRuleRewriteStrategy3() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.load(); - reasoner.resetReasoner(); reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); } } From 035fdb5263cbd0073ed4f3b20524392aade95b03 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 22 Aug 2019 09:55:03 +0200 Subject: [PATCH 0154/1003] Removed/runtimed most VLog4j exceptions --- .../exceptions/EdbIdbSeparationException.java | 54 -------- .../IncompatiblePredicateArityException.java | 10 +- .../exceptions/ReasonerStateException.java | 2 +- .../exceptions/VLog4jRuntimeException.java | 43 ++++++ .../vlog4j/core/reasoner/Reasoner.java | 125 ++++-------------- .../reasoner/implementation/VLogReasoner.java | 87 ++++++------ .../vlog4j/core/reasoner/LoggingTest.java | 20 +-- .../core/reasoner/ReasonerTimeoutTest.java | 17 
+-- .../implementation/AddDataSourceTest.java | 26 ++-- .../implementation/AnswerQueryTest.java | 33 ++--- .../FileDataSourceTestUtils.java | 9 +- .../GeneratedAnonymousIndividualsTest.java | 18 +-- .../implementation/ReasonerStateTest.java | 49 ++----- .../SparqlQueryResultDataSourceTest.java | 20 ++- .../implementation/VLogReasonerBasics.java | 14 +- .../VLogReasonerCombinedInputs.java | 21 +-- .../implementation/VLogReasonerCsvInput.java | 20 +-- .../implementation/VLogReasonerCsvOutput.java | 18 +-- .../implementation/VLogReasonerNegation.java | 12 +- .../implementation/VLogReasonerRdfInput.java | 33 ++--- .../VLogReasonerSparqlInput.java | 20 +-- .../vlog4j/examples/CountingTriangles.java | 3 - .../vlog4j/examples/DoidExample.java | 4 - .../examples/SimpleReasoningExample.java | 3 - .../examples/core/AddDataFromCsvFile.java | 6 +- .../examples/core/AddDataFromRdfFile.java | 6 +- .../core/AddDataFromSparqlQueryResults.java | 6 +- .../core/ConfigureReasonerLogging.java | 6 +- .../SkolemVsRestrictedChaseTermination.java | 6 +- .../examples/graal/AddDataFromDlgpFile.java | 6 +- .../examples/graal/AddDataFromGraal.java | 6 +- .../examples/graal/DoidExampleGraal.java | 6 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 6 +- .../examples/rdf/AddDataFromRdfModel.java | 7 +- .../vlog4j/rdf/TestReasonOverRdfFacts.java | 12 +- 35 files changed, 234 insertions(+), 500 deletions(-) delete mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/EdbIdbSeparationException.java create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jRuntimeException.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/EdbIdbSeparationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/EdbIdbSeparationException.java deleted file mode 100644 index af51904c7..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/EdbIdbSeparationException.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.semanticweb.vlog4j.core.exceptions; - -import java.text.MessageFormat; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Set; - -import org.semanticweb.vlog4j.core.model.api.Predicate; - -/** - * Exception thrown when attempting to load the reasoner with a knowledge base (facts and rules) that contains predicates that - * are both EDB (occur in facts) and IDB (occur in rule heads). Predicates that - * occur in facts cannot appear in rule heads. - * - * @author Irina Dragoste - * - */ -public class EdbIdbSeparationException extends VLog4jException { - - /** - * generated serial version UID - */ - private static final long serialVersionUID = -6731598892649856691L; - - private static final String messagePattern = "The following predicates occur both in facts (EDBs) and rule heads (IDBs): {0}!"; - - /** - * Creates an exception with a logging message for given predicates. 
- * @param edbIdbPredicates predicates which are both EDB (occur in facts) and IDB (occur in rule heads). - */ - public EdbIdbSeparationException(Set edbIdbPredicates) { - super(MessageFormat.format(messagePattern, edbIdbPredicates)); - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java index 46d9075eb..db42ff58c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java @@ -24,19 +24,15 @@ import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; /** - * Expression thrown when attempting to load the reasoner with a knowledge base - * that contains facts from a {@link DataSource} (added with - * {@link Reasoner#addFactsFromDataSource(Predicate, DataSource)}), whose arity - * does not correspond to the arity of the {@link Predicate} the data source was - * added for. + * Expression thrown when attempting to load facts for a {@link Predicate} from + * a {@link DataSource} that does not contain data of the specified arity. * * @author Irina Dragoste * */ -public class IncompatiblePredicateArityException extends VLog4jException { +public class IncompatiblePredicateArityException extends VLog4jRuntimeException { private static final long serialVersionUID = -5081219042292721026L; private static final String messagePattern = "Predicate arity [{0}] of predicate [{1}] incompatible with arity [{2}] of the data source [{3}]!"; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java index 5b20f275b..7d26dbd47 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java @@ -31,7 +31,7 @@ * @author Irina Dragoste * */ -public class ReasonerStateException extends VLog4jException { +public class ReasonerStateException extends VLog4jRuntimeException { /** * generated serial version UID diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jRuntimeException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jRuntimeException.java new file mode 100644 index 000000000..56fd985a6 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jRuntimeException.java @@ -0,0 +1,43 @@ +package org.semanticweb.vlog4j.core.exceptions; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +/** + * Superclass of unchecked exceptions generated by VLog4j. + * + * @author Markus Kroetzsch + * + */ +public class VLog4jRuntimeException extends RuntimeException { + + /** + * Generated serial version ID. + */ + private static final long serialVersionUID = -6574826887294416900L; + + public VLog4jRuntimeException(String message, Throwable cause) { + super(message, cause); + } + + public VLog4jRuntimeException(String message) { + super(message); + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 7a4236f98..a1c1cc72b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -3,9 +3,6 @@ import java.io.IOException; import org.eclipse.jdt.annotation.Nullable; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -16,7 +13,6 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import karmaresearch.vlog.Atom; -import karmaresearch.vlog.NotStartedException; /* * #%L @@ -58,13 +54,8 @@ * {@link RuleRewriteStrategy}.
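With `VLog4jRuntimeException` introduced above as the unchecked base class, and `ReasonerStateException` and `IncompatiblePredicateArityException` re-parented under it, callers no longer have to declare these exceptions. A minimal sketch of client code against the revised API, assuming the vlog4j-core classes touched in this patch; the predicate name `p` and constant `c` are invented for illustration:

```
import java.io.IOException;
import java.util.Arrays;

import org.semanticweb.vlog4j.core.exceptions.VLog4jRuntimeException;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class UncheckedExceptionsSketch {
    public static void main(final String[] args) {
        final KnowledgeBase kb = new KnowledgeBase();
        // hypothetical fact p(c)
        kb.addStatement(Expressions.makeFact("p", Arrays.asList(Expressions.makeConstant("c"))));

        try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.reason(); // IOException is the only checked exception left
        } catch (final IOException e) {
            // problems reading external data sources or writing log files
            e.printStackTrace();
        } catch (final VLog4jRuntimeException e) {
            // unchecked, e.g. ReasonerStateException or IncompatiblePredicateArityException
            e.printStackTrace();
        }
    }
}
```

The try-with-resources block still closes the reasoner automatically; catching `VLog4jRuntimeException` is optional, since such errors indicate incorrect API usage rather than recoverable conditions.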
    *
    * Once adding facts and rules to the knowledge base has been completed, the - * knowledge base can be loaded into the reasoner. The following - * pre-condition must be respected: the {@link Predicate}s appearing in - * {@link Rule} heads (called IDBs) cannot also appear in knowledge base - * facts (called EDBs). An {@link EdbIdbSeparationException} would be - * thrown when loading the knowledge base.
    + * knowledge base can be loaded into the reasoner. * - *
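Since the EDB/IDB separation check is removed together with `EdbIdbSeparationException`, a predicate may now be fed from a file-based data source, from in-memory facts, and from rule heads at the same time, as exercised by the new `VLogReasonerCombinedInputs` tests later in this patch. A rough sketch of such a mixed knowledge base, assuming `CsvFileDataSource` lives in the `reasoner.implementation` package as in those tests; the file path `input/qFacts.csv` is invented:

```
import java.io.File;
import java.io.IOException;
import java.util.Arrays;

import org.semanticweb.vlog4j.core.model.api.Predicate;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource;

public class MixedEdbIdbSketch {
    public static KnowledgeBase buildKnowledgeBase() throws IOException {
        final Predicate q = Expressions.makePredicate("q", 1);
        final KnowledgeBase kb = new KnowledgeBase();

        // q is fed from a CSV file (hypothetical path)...
        kb.addStatement(new DataSourceDeclarationImpl(q, new CsvFileDataSource(new File("input/qFacts.csv"))));

        // ...and asserted directly as a fact q(c)...
        kb.addStatement(Expressions.makeFact("q", Arrays.asList(Expressions.makeConstant("c"))));

        // ...and also derived by the rule p(x) -> q(x).
        final Rule pImpliesQ = Expressions.makeRule(
                Expressions.makePositiveLiteral("q", Expressions.makeVariable("x")),
                Expressions.makePositiveLiteral("p", Expressions.makeVariable("x")));
        kb.addStatement(pImpliesQ);

        return kb; // loading this used to fail with EdbIdbSeparationException
    }
}
```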
    * The loaded reasoner can perform atomic queries on explicit facts * before reasoning, and all implicit and explicit facts after calling * {@link Reasoner#reason()}. Queries can provide an iterator for the results @@ -84,6 +75,8 @@ * before loading. Then, more information can be added to the knowledge base, * the reasoner can be loaded again, and querying and reasoning can be * performed. + * + * @FIXME Update the outdated JavaDoc * * @author Irina Dragoste * @@ -159,9 +152,8 @@ public static Reasoner getInstance() { * * @param ruleRewritingStrategy strategy according to which the rules will be * rewritten before reasoning. - * @throws ReasonerStateException if the reasoner has already been loaded. */ - void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) throws ReasonerStateException; + void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy); /** * Getter for the strategy according to which rules will be rewritten before @@ -177,9 +169,8 @@ public static Reasoner getInstance() { * {@link LogLevel#WARNING} * * @param logLevel the logging level to be set for VLog C++ resource. - * @throws ReasonerStateException if the method is called on a closed reasoner. */ - void setLogLevel(LogLevel logLevel) throws ReasonerStateException; + void setLogLevel(LogLevel logLevel); /** * Returns the logging level of the internal VLog C++ resource. If no value has @@ -197,50 +188,26 @@ public static Reasoner getInstance() { * @param filePath the file for the internal VLog C++ resource to log to. If * {@code null} or an invalid file path, the reasoner will log * to the default system output. - * @throws ReasonerStateException if the method is called on a closed reasoner. */ - void setLogFile(String filePath) throws ReasonerStateException; + void setLogFile(String filePath); /** * Loads the knowledge base, consisting of the current rules and facts, - * into the reasoner (if it has not been loaded yet). If the reasoner has - * already been loaded, this call does nothing. After loading, the reasoner is - * ready for reasoning and querying.
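The configuration setters (`setRuleRewriteStrategy`, `setLogLevel`, `setLogFile`) now only fail with the unchecked `ReasonerStateException` when the reasoner has already been closed, so no checked exceptions need to be handled. A small sketch of the resulting call pattern; the log file path is invented:

```
import java.io.IOException;

import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.LogLevel;
import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class ReasonerConfigurationSketch {
    public static void main(final String[] args) throws IOException {
        try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) {
            // none of these setters declares a checked exception any more
            reasoner.setLogFile("logs/vlog.log"); // hypothetical path
            reasoner.setLogLevel(LogLevel.INFO);
            reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE);

            reasoner.load();   // IOException only for data source problems
            reasoner.reason(); // likewise
        }
    }
}
```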
    - * Loading pre-condition: the {@link Predicate}s appearing in - * {@link Rule} heads ({@link Rule#getHead()}), called IDB predicates, cannot - * also appear in knowledge base facts, called EDB predicates. An - * {@link EdbIdbSeparationException} would be thrown in this case. + * into the reasoner (if it has not been loaded yet). After loading, the + * reasoner is ready for reasoning and querying. * - * @throws IOException if an I/O error occurs related to - * the resources in the knowledge - * base to be loaded. - * @throws EdbIdbSeparationException if a {@link Predicate} appearing - * in a {@link Rule} head - * (IDB predicate) also appears in a - * knowledge base fact (EDB - * predicate). - * @throws IncompatiblePredicateArityException if the arity of a - * {@link Predicate} of a fact - * loaded from a data source - * ({@link #addFactsFromDataSource(Predicate, DataSource)}) - * does nor match the arity of the - * facts in the corresponding data - * source. - * @throws ReasonerStateException if the method is called on a - * closed reasoner. + * @throws IOException if an I/O error occurs related to the resources in the + * knowledge base to be loaded. */ - void load() - throws IOException, EdbIdbSeparationException, IncompatiblePredicateArityException, ReasonerStateException; + void load() throws IOException; /** * Checks whether the loaded rules and loaded fact EDB predicates are Acyclic, * Cyclic, or cyclicity cannot be determined. * * @return - * @throws ReasonerStateException - * @throws NotStartedException */ - CyclicityResult checkForCycles() throws ReasonerStateException, NotStartedException; + CyclicityResult checkForCycles(); /** * Check the Joint Acyclicity (JA) property of loaded rules and EDB @@ -253,10 +220,8 @@ void load() * @return {@code true}, if the loaded set of rules is Joint Acyclic with * respect to the EDB predicates of loaded facts.
    * {@code false}, otherwise - * @throws ReasonerStateException - * @throws NotStartedException */ - boolean isJA() throws ReasonerStateException, NotStartedException; + boolean isJA(); /** * Check the Restricted Joint Acyclicity (RJA) property of loaded rules @@ -268,10 +233,8 @@ void load() * @return {@code true}, if the loaded set of rules is Restricted Joint Acyclic * with respect to the EDB predicates of loaded facts.
    * {@code false}, otherwise - * @throws ReasonerStateException - * @throws NotStartedException */ - boolean isRJA() throws ReasonerStateException, NotStartedException; + boolean isRJA(); /** * Check the Model-Faithful Acyclicity (MFA) property of loaded rules and @@ -284,10 +247,8 @@ void load() * @return {@code true}, if the loaded set of rules is Model-Faithful Acyclic * with respect to the EDB predicates of loaded facts.
    * {@code false}, otherwise - * @throws ReasonerStateException - * @throws NotStartedException */ - boolean isMFA() throws ReasonerStateException, NotStartedException; + boolean isMFA(); /** * Check the Restricted Model-Faithful Acyclicity (RMFA) property of @@ -300,10 +261,8 @@ void load() * @return {@code true}, if the loaded set of rules is Restricted Model-Faithful * Acyclic with respect to the EDB predicates of loaded facts.
    * {@code false}, otherwise - * @throws ReasonerStateException - * @throws NotStartedException */ - boolean isRMFA() throws ReasonerStateException, NotStartedException; + boolean isRMFA(); /** * Check the Model-Faithful Cyclicity (MFC) property of loaded rules and @@ -317,10 +276,8 @@ void load() * @return {@code true}, if the loaded set of rules is Model-Faithful Cyclic * with respect to the EDB predicates of loaded facts.
    * {@code false}, otherwise - * @throws ReasonerStateException - * @throws NotStartedException */ - boolean isMFC() throws ReasonerStateException, NotStartedException; + boolean isMFC(); /** * Performs reasoning on the loaded knowledge base, depending on the set @@ -350,17 +307,9 @@ void load() *
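Taken together, these acyclicity checks can drive the choice of chase variant before materialisation. The sketch below shows one possible policy, not one prescribed by the library: use the Skolem chase when JA or MFA guarantees its termination, otherwise fall back to the restricted chase, bounded by a timeout when neither RJA nor RMFA holds. The 60-second limit is arbitrary.

```
import java.io.IOException;

import org.semanticweb.vlog4j.core.reasoner.Algorithm;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class ChaseSelectionSketch {

    /**
     * Materialises the given knowledge base, picking the chase variant based on
     * the acyclicity checks; returns true if materialisation completed.
     */
    public static boolean materialise(final KnowledgeBase kb) throws IOException {
        try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.load();

            if (reasoner.isJA() || reasoner.isMFA()) {
                // the Skolem chase is guaranteed to terminate here
                reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE);
            } else {
                reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE);
                if (!reasoner.isRJA() && !reasoner.isRMFA()) {
                    // termination not guaranteed: bound the materialisation time
                    reasoner.setReasoningTimeout(60);
                }
            }
            return reasoner.reason(); // false if stopped by the timeout
        }
    }
}
```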
  • {@code false}, if reasoning has been interrupted before * completion.
  • * - * @throws IOException if I/O exceptions occur during - * reasoning. - * @throws ReasonerStateException if this method is called before - * loading ({@link Reasoner#load()} - * or after closing - * ({@link Reasoner#close()}). - * @throws IncompatiblePredicateArityException - * @throws EdbIdbSeparationException + * @throws IOException if I/O exceptions occur during reasoning. */ - boolean reason() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException; + boolean reason() throws IOException; // TODO add examples to query javadoc /** @@ -394,18 +343,9 @@ boolean reason() * results. Otherwise, the query results will only contain * the facts with terms of type {@link TermType#CONSTANT} * (representing named individuals). - * @return an {@link AutoCloseable} iterator for {@link QueryResult}s, - * representing distinct answers to the query. - * @throws ReasonerStateException if this method is called before loading - * ({@link Reasoner#load()} or after closing - * ({@link Reasoner#close()}). - * - * @throws IllegalArgumentException if the given {@code query} contains terms - * ({@link Atom#getTerms()}) which are not of - * type {@link TermType#CONSTANT} or - * {@link TermType#VARIABLE}. + * @return QueryResultIterator that represents distinct answers to the query. */ - QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) throws ReasonerStateException; + QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks); // TODO add examples to query javadoc /** @@ -444,35 +384,20 @@ boolean reason() * the facts with terms of type {@link TermType#CONSTANT} * (representing named individuals). * - * @throws ReasonerStateException if this method is called before loading - * ({@link Reasoner#load()} or after closing - * ({@link Reasoner#close()}). - * @throws IOException if an I/O error occurs regarding given file - * ({@code csvFilePath)}. - * @throws IllegalArgumentException - *
      - *
    • if the given {@code queryAtom} contains - * terms ({@link Atom#getTerms()}) which are - * not of type {@link TermType#CONSTANT} or - * {@link TermType#VARIABLE}.
    • - *
    • if the given {@code csvFilePath} does - * not end with .csv - * extension.
    • - *
    + * @throws IOException if an I/O error occurs regarding given file + * ({@code csvFilePath)}. */ // TODO update javadoc with return type MaterialisationState exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeBlanks) - throws ReasonerStateException, IOException; + throws IOException; /** * Resets the reasoner to a pre-loading state (before the call of * {@link #load()} method). All facts inferred by reasoning are discarded. Rules * and facts added to the reasoner need to be loaded again, to be able to * perform querying and reasoning. - * - * @throws ReasonerStateException if the method is called on a closed reasoner. */ - void resetReasoner() throws ReasonerStateException; + void resetReasoner(); // TODO Map exportDBToDir(File location); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index ccde18e68..56d915054 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -12,7 +12,6 @@ import java.util.Set; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; @@ -271,7 +270,7 @@ public KnowledgeBase getKnowledgeBase() { @Override public void setAlgorithm(final Algorithm algorithm) { Validate.notNull(algorithm, "Algorithm cannot be null!"); - warnClosed(); + validateNotClosed(); this.algorithm = algorithm; } @@ -282,7 +281,7 @@ public Algorithm getAlgorithm() { @Override public void setReasoningTimeout(Integer seconds) { - warnClosed(); + validateNotClosed(); if (seconds != null) { Validate.isTrue(seconds > 0, "Only strictly positive timeout period alowed!", seconds); } @@ -296,7 +295,7 @@ public Integer getReasoningTimeout() { @Override public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { - warnClosed(); + validateNotClosed(); Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); this.ruleRewriteStrategy = ruleRewritingStrategy; } @@ -307,7 +306,7 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { } @Override - public void load() throws IOException, IncompatiblePredicateArityException, ReasonerStateException { + public void load() throws IOException { validateNotClosed(); final LoadKbVisitor visitor = new LoadKbVisitor(); @@ -362,6 +361,13 @@ String getDataSourceConfigurationString() { return configStringBuilder.toString(); } + /** + * Checks if the loaded external data sources do in fact contain data of the + * correct arity. + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { for (final Predicate predicate : edbPredicates.keySet()) { validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); @@ -372,6 +378,16 @@ void validateDataSourcePredicateArities() throws IncompatiblePredicateArityExcep } } + /** + * Checks if the loaded external data for a given source does in fact contain + * data of the correct arity for the given predidate. 
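This arity validation surfaces as the now unchecked `IncompatiblePredicateArityException` when the reasoner is loaded, mirroring the `testLoadCsvFileWrongArity` test below. A sketch of the failure mode; `input/pairs.csv` stands for a hypothetical CSV file with two columns per row:

```
import java.io.File;
import java.io.IOException;

import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException;
import org.semanticweb.vlog4j.core.model.api.Predicate;
import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class ArityMismatchSketch {
    public static void main(final String[] args) throws IOException {
        // hypothetical file whose rows have two columns, i.e. arity 2...
        final CsvFileDataSource binaryCsv = new CsvFileDataSource(new File("input/pairs.csv"));

        // ...declared for a predicate of arity 1
        final Predicate unaryP = Expressions.makePredicate("p", 1);

        final KnowledgeBase kb = new KnowledgeBase();
        kb.addStatement(new DataSourceDeclarationImpl(unaryP, binaryCsv));

        try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.load(); // arity check fails here
        } catch (final IncompatiblePredicateArityException e) {
            // unchecked since this patch
            System.out.println("Arity mismatch detected: " + e.getMessage());
        }
    }
}
```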
+ * + * @param predicate the predicate for which data is loaded + * @param dataSource the data source used + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) throws IncompatiblePredicateArityException { if (dataSource == null) @@ -430,8 +446,7 @@ void loadRules() { } @Override - public boolean reason() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public boolean reason() throws IOException { switch (this.reasonerState) { case BEFORE_LOADING: load(); @@ -470,6 +485,8 @@ private void runChase() { } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final MaterializationException e) { + // FIXME: the message generate here is not guaranteed to be the correct + // interpretation of the exception that is caught throw new RuntimeException( "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", e); @@ -477,7 +494,7 @@ private void runChase() { } @Override - public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) throws ReasonerStateException { + public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) { validateNotClosed(); if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); @@ -502,7 +519,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla @Override public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, - final boolean includeBlanks) throws ReasonerStateException, IOException { + final boolean includeBlanks) throws IOException { validateNotClosed(); if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); @@ -525,7 +542,7 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, } @Override - public void resetReasoner() throws ReasonerStateException { + public void resetReasoner() { validateNotClosed(); // TODO what should happen to the KB? 
this.reasonerState = ReasonerState.BEFORE_LOADING; @@ -541,7 +558,7 @@ public void close() { } @Override - public void setLogLevel(LogLevel logLevel) throws ReasonerStateException { + public void setLogLevel(LogLevel logLevel) { validateNotClosed(); Validate.notNull(logLevel, "Log level cannot be null!"); this.internalLogLevel = logLevel; @@ -554,61 +571,66 @@ public LogLevel getLogLevel() { } @Override - public void setLogFile(String filePath) throws ReasonerStateException { + public void setLogFile(String filePath) { validateNotClosed(); this.vLog.setLogFile(filePath); } @Override - public boolean isJA() throws ReasonerStateException, NotStartedException { + public boolean isJA() { return checkAcyclicity(AcyclicityNotion.JA); } @Override - public boolean isRJA() throws ReasonerStateException, NotStartedException { + public boolean isRJA() { return checkAcyclicity(AcyclicityNotion.RJA); } @Override - public boolean isMFA() throws ReasonerStateException, NotStartedException { + public boolean isMFA() { return checkAcyclicity(AcyclicityNotion.MFA); } @Override - public boolean isRMFA() throws ReasonerStateException, NotStartedException { + public boolean isRMFA() { return checkAcyclicity(AcyclicityNotion.RMFA); } @Override - public boolean isMFC() throws ReasonerStateException, NotStartedException { + public boolean isMFC() { + validateNotClosed(); if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "checking rules acyclicity is not allowed before loading!"); } - final CyclicCheckResult checkCyclic = this.vLog.checkCyclic("MFC"); - if (checkCyclic.equals(CyclicCheckResult.CYCLIC)) { - return true; + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic("MFC"); + } catch (NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible } - return false; + return checkCyclic.equals(CyclicCheckResult.CYCLIC); } - private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) - throws ReasonerStateException, NotStartedException { + private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { + validateNotClosed(); if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "checking rules acyclicity is not allowed before loading!"); } - final CyclicCheckResult checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); - if (checkCyclic.equals(CyclicCheckResult.NON_CYCLIC)) { - return true; + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); + } catch (NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible } - return false; + return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); } @Override - public CyclicityResult checkForCycles() throws ReasonerStateException, NotStartedException { + public CyclicityResult checkForCycles() { final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); if (acyclic) { return CyclicityResult.ACYCLIC; @@ -684,13 +706,4 @@ void validateNotClosed() throws ReasonerStateException { } } - /** - * Check if reasoner is closed and log a warning if it is. 
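Because setters such as `setAlgorithm`, `setReasoningTimeout`, and `setRuleRewriteStrategy` now call `validateNotClosed()` instead of the removed `warnClosed()`, operations on a closed reasoner fail loudly with the unchecked `ReasonerStateException` rather than merely logging a warning. A brief sketch of the observable behaviour:

```
import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException;
import org.semanticweb.vlog4j.core.reasoner.Algorithm;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class ClosedReasonerSketch {
    public static void main(final String[] args) {
        final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase());
        reasoner.close();
        try {
            // previously only logged a warning; now rejected
            reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE);
        } catch (final ReasonerStateException e) {
            System.out.println("Rejected: " + e.getMessage());
        }
    }
}
```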
- */ - void warnClosed() { - if (this.reasonerState == ReasonerState.AFTER_CLOSING) { - LOGGER.warn("Meaningless operation performed on a closed reasoner object."); - } - } - } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 161858d51..8ea46a921 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -32,9 +32,6 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -67,8 +64,7 @@ public class LoggingTest { // any time @Test - public void testSetLogFileNull() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testSetLogFileNull() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(null); reasoner.setLogLevel(LogLevel.INFO); @@ -80,8 +76,7 @@ public void testSetLogFileNull() } @Test - public void testSetLogFileInexistent() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testSetLogFileInexistent() throws IOException { final String inexistentFilePath = LOGS_FOLDER + "a/b"; try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -97,15 +92,14 @@ public void testSetLogFileInexistent() } @Test(expected = NullPointerException.class) - public void testSetLogLevelNull() throws ReasonerStateException { + public void testSetLogLevelNull() { try (final Reasoner instance = Reasoner.getInstance()) { instance.setLogLevel(null); } } @Test - public void testSetLogFileAppendsToFile() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testSetLogFileAppendsToFile() throws IOException { final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testSetLogFileAppendsToFile.log"; assertFalse(new File(logFilePath).exists()); int countLinesBeforeReset = 0; @@ -130,8 +124,7 @@ public void testSetLogFileAppendsToFile() } @Test - public void testLogLevelInfo() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testLogLevelInfo() throws IOException { final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelInfo.log"; assertFalse(new File(logFilePath).exists()); @@ -150,8 +143,7 @@ public void testLogLevelInfo() } @Test - public void testLogLevelDebug() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testLogLevelDebug() throws IOException { final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDebug.log"; assertFalse(new File(logFilePath).exists()); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index 6706b5845..d56aa3380 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -38,9 +38,6 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.rules.Timeout; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -118,13 +115,12 @@ public static void setUpBeforeClass() { } @Before - public void setUp() throws ReasonerStateException { + public void setUp() { this.reasoner = new VLogReasoner(kb); } @Test - public void skolem() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void skolem() throws IOException { this.reasoner.setReasoningTimeout(timeout); this.reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); @@ -134,8 +130,7 @@ public void skolem() } @Test - public void restricted() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void restricted() throws IOException { this.reasoner.setReasoningTimeout(timeout); this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); @@ -145,8 +140,7 @@ public void restricted() } @Test - public void skolemAfterLoad() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void skolemAfterLoad() throws IOException { this.reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); this.reasoner.load(); @@ -157,8 +151,7 @@ public void skolemAfterLoad() } @Test - public void restrictedAfterLoad() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void restrictedAfterLoad() throws IOException { this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); this.reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 8e1ecc788..a53a28fa2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -30,8 +30,6 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; @@ -42,15 +40,12 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import karmaresearch.vlog.EDBConfigurationException; - public class AddDataSourceTest { private static final String CSV_FILE_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFacts.csv"; @Test - public void testAddDataSourceExistentDataForDifferentPredicates() throws ReasonerStateException, - EdbIdbSeparationException, EDBConfigurationException, IOException, 
IncompatiblePredicateArityException { + public void testAddDataSourceExistentDataForDifferentPredicates() throws IOException { final Predicate predicateParity1 = Expressions.makePredicate("p", 1); final Constant constantA = Expressions.makeConstant("a"); final Fact factPredicatePArity2 = Expressions.makeFact("p", Arrays.asList(constantA, constantA)); @@ -80,8 +75,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws Reasone } @Test - public void testAddDataSourceBeforeLoading() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testAddDataSourceBeforeLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); @@ -98,8 +92,7 @@ public void testAddDataSourceBeforeLoading() throws ReasonerStateException, EdbI // TODO rewrite test @Ignore @Test(expected = ReasonerStateException.class) - public void testAddDataSourceAfterLoading() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testAddDataSourceAfterLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); @@ -116,8 +109,7 @@ public void testAddDataSourceAfterLoading() throws ReasonerStateException, EdbId // TODO rewrite test @Ignore @Test(expected = ReasonerStateException.class) - public void testAddDataSourceAfterReasoning() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testAddDataSourceAfterReasoning() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); @@ -132,11 +124,11 @@ public void testAddDataSourceAfterReasoning() throws ReasonerStateException, Edb } } - //FIXME decide how to handle datasources with multiple predicates + // FIXME decide how to handle datasources with multiple predicates @Ignore // TODO move to a test class for KnowledgeBase @Test(expected = IllegalArgumentException.class) - public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws ReasonerStateException, IOException { + public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); @@ -144,12 +136,12 @@ public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws Reasoner kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } - - //FIXME decide how to handle datasources with multiple predicates + + // FIXME decide how to handle datasources with multiple predicates @Ignore // TODO move to a test class for KnowledgeBase @Test(expected = IllegalArgumentException.class) - public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException, IOException { + public void testAddDataSourceNoFactsForPredicate() throws IOException { final 
Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1), diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index 030e6db11..90b6fa877 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -34,9 +34,6 @@ import org.junit.Assert; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -51,13 +48,10 @@ import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import karmaresearch.vlog.EDBConfigurationException; - public class AnswerQueryTest { @Test - public void testEDBQuerySameConstantSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOException { final String predicate = "p"; final Constant constantC = Expressions.makeConstant("c"); final Constant constantD = Expressions.makeConstant("d"); @@ -101,8 +95,7 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() } @Test - public void testIDBQuerySameBlankSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testIDBQuerySameBlankSubstitutesSameVariableName() throws IOException { final String predicate = "p"; final Variable x = Expressions.makeVariable("X"); final Variable y = Expressions.makeVariable("Y"); @@ -144,8 +137,7 @@ public void testIDBQuerySameBlankSubstitutesSameVariableName() } @Test - public void testIDBQuerySameIndividualSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testIDBQuerySameIndividualSubstitutesSameVariableName() throws IOException { final String predicate = "p"; final Variable x = Expressions.makeVariable("X"); final Variable y = Expressions.makeVariable("Y"); @@ -216,8 +208,7 @@ public void testIDBQuerySameIndividualSubstitutesSameVariableName() } @Test - public void queryResultWithBlanks() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void queryResultWithBlanks() throws IOException { final Variable vx = Expressions.makeVariable("x"); final Variable vy = Expressions.makeVariable("y"); // P(x) -> Q(y) @@ -251,8 +242,7 @@ public void queryResultWithBlanks() } @Test(expected = IllegalArgumentException.class) - public void queryEmptyKnowledgeBaseBeforeReasoning() - throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyKnowledgeBaseBeforeReasoning() 
throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -264,8 +254,7 @@ public void queryEmptyKnowledgeBaseBeforeReasoning() } @Test(expected = IllegalArgumentException.class) - public void queryEmptyKnowledgeBaseAfterReasoning() - throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyKnowledgeBaseAfterReasoning() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -279,8 +268,7 @@ public void queryEmptyKnowledgeBaseAfterReasoning() } @Test - public void queryEmptyRules() - throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); final Fact fact = Expressions.makeFact("P", Arrays.asList(Expressions.makeConstant("c"))); kb.addStatement(fact); @@ -302,8 +290,7 @@ public void queryEmptyRules() } @Test - public void queryEmptyFacts() throws EDBConfigurationException, IOException, EdbIdbSeparationException, - ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyFacts() throws IOException { final Variable vx = Expressions.makeVariable("x"); final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); @@ -319,7 +306,7 @@ public void queryEmptyFacts() throws EDBConfigurationException, IOException, Edb Assert.assertFalse(queryResultIterator.hasNext()); queryResultIterator.close(); } - + final PositiveLiteral queryAtom2 = Expressions.makePositiveLiteral("q", Expressions.makeVariable("?x")); try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom2, true)) { Assert.assertFalse(queryResultIterator.hasNext()); @@ -331,7 +318,7 @@ public void queryEmptyFacts() throws EDBConfigurationException, IOException, Edb try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom1, true)) { assertFalse(queryResultIteratorAfterReason.hasNext()); } - + try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom2, true)) { assertFalse(queryResultIteratorAfterReason.hasNext()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java index ab02fdac0..7898e0e4e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -33,9 +33,6 @@ import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVParser; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; @@ -102,8 +99,7 @@ public static void testConstructor(final FileDataSource fileDataSource, final Fi } public static void testLoadEmptyFile(final Predicate predicate, final 
PositiveLiteral queryAtom, - final FileDataSource emptyFileDataSource) - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + final FileDataSource emptyFileDataSource) throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(predicate, emptyFileDataSource)); @@ -122,8 +118,7 @@ public static void testLoadEmptyFile(final Predicate predicate, final PositiveLi } } - public static void testNoFactsOverPredicate(final Reasoner reasoner, final PositiveLiteral queryAtom) - throws ReasonerStateException { + public static void testNoFactsOverPredicate(final Reasoner reasoner, final PositiveLiteral queryAtom) { try (final QueryResultIterator answerQuery = reasoner.answerQuery(queryAtom, true)) { assertFalse(answerQuery.hasNext()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 402568bb0..fc21e98f8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -30,9 +30,6 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -77,8 +74,7 @@ public class GeneratedAnonymousIndividualsTest { } @Test - public void testBlanksSkolemChaseNoRuleRewrite() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testBlanksSkolemChaseNoRuleRewrite() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); @@ -94,8 +90,7 @@ public void testBlanksSkolemChaseNoRuleRewrite() } @Test - public void testBlanksSkolemChaseSplitHeadPieces() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testBlanksSkolemChaseSplitHeadPieces() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); @@ -114,8 +109,7 @@ public void testBlanksSkolemChaseSplitHeadPieces() } @Test - public void testBlanksRestrictedChaseNoRuleRewrite() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testBlanksRestrictedChaseNoRuleRewrite() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); @@ -131,8 +125,7 @@ public void testBlanksRestrictedChaseNoRuleRewrite() } @Test - public void testBlanksRestrictedChaseSplitHeadPieces() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testBlanksRestrictedChaseSplitHeadPieces() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { @@ -164,8 +157,7 @@ public void 
testBlanksRestrictedChaseSplitHeadPieces() } } - private void checkTwoDistinctBlanksGenerated(final Reasoner reasoner) - throws ReasonerStateException, IOException, EdbIdbSeparationException { + private void checkTwoDistinctBlanksGenerated(final Reasoner reasoner) throws IOException { // expected facts: P(c, _:b1), P(c, _:b2) final List> csvContentIncludeBlanks = FileDataSourceTestUtils.getCSVContent(includeBlanksFilePath); assertTrue(csvContentIncludeBlanks.size() == 2); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 8a0764a37..208fcd4c0 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -34,8 +34,6 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; @@ -83,8 +81,7 @@ public void testSetReasoningTimeout() { // FIXME update test @Ignore @Test(expected = ReasonerStateException.class) - public void testAddRules1() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testAddRules1() throws IOException { try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.getKnowledgeBase().addStatement(ruleQxPx); reasoner.load(); @@ -92,8 +89,7 @@ public void testAddRules1() } @Test - public void testAddRules2() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testAddRules2() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(ruleQxPx); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -114,8 +110,7 @@ public void testAddRules3() { // FIXME update test @Ignore @Test - public void testAddFacts1() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testAddFacts1() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(factPc); @@ -125,8 +120,7 @@ public void testAddFacts1() } @Test(expected = NullPointerException.class) - public void testAddFacts2() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testAddFacts2() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); final List facts = new ArrayList<>(); @@ -140,30 +134,28 @@ public void testAddFacts2() } @Test - public void testResetBeforeLoad() throws ReasonerStateException { + public void testResetBeforeLoad() { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.resetReasoner(); } } @Test(expected = NullPointerException.class) - public void setRuleRewriteStrategy1() throws ReasonerStateException { + public void setRuleRewriteStrategy1() { try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.setRuleRewriteStrategy(null); } } @Test - public void setRuleRewriteStrategy3() - throws ReasonerStateException, EdbIdbSeparationException, IOException, 
IncompatiblePredicateArityException { + public void setRuleRewriteStrategy3() { try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); } } @Test - public void testResetDiscardInferences() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testResetDiscardInferences() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(ruleQxPx, factPc); @@ -198,8 +190,7 @@ public void testResetDiscardInferences() } @Test - public void testResetKeepExplicitDatabase() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testResetKeepExplicitDatabase() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(ruleQxPx); // assert p(c) @@ -229,8 +220,7 @@ public void testResetKeepExplicitDatabase() } } - private void checkExplicitFacts(final Reasoner reasoner, final Predicate predicateR1) - throws ReasonerStateException { + private void checkExplicitFacts(final Reasoner reasoner, final Predicate predicateR1) { try (final QueryResultIterator queryResultIteratorPx = reasoner.answerQuery(ruleBodyPx, true)) { assertTrue(queryResultIteratorPx.hasNext()); assertEquals(factPc.getTerms(), queryResultIteratorPx.next().getTerms()); @@ -245,8 +235,7 @@ private void checkExplicitFacts(final Reasoner reasoner, final Predicate predica } @Test - public void testResetEmptyKnowledgeBase() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testResetEmptyKnowledgeBase() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -266,24 +255,15 @@ public void testResetEmptyKnowledgeBase() } } - @Test - public void testFailReasonBeforeLoad() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.reason(); - } - - } - @Test(expected = ReasonerStateException.class) - public void testFailAnswerQueryBeforeLoad() throws ReasonerStateException { + public void testFailAnswerQueryBeforeLoad() { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.answerQuery(exampleQueryAtom, true); } } @Test(expected = ReasonerStateException.class) - public void testFailExportQueryAnswerToCsvBeforeLoad() throws ReasonerStateException, IOException { + public void testFailExportQueryAnswerToCsvBeforeLoad() throws IOException { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); @@ -291,8 +271,7 @@ public void testFailExportQueryAnswerToCsvBeforeLoad() throws ReasonerStateExcep } @Test - public void testSuccessiveCloseAfterLoad() - throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { + public void testSuccessiveCloseAfterLoad() throws IOException { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.load(); reasoner.close(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java index a0f57bef4..aa4ec6a9f 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -30,18 +30,16 @@ import org.apache.commons.lang3.StringUtils; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; public class SparqlQueryResultDataSourceTest { - + final URL endpoint = new URL("http://query.wikidata.org/sparql"); - - public SparqlQueryResultDataSourceTest() throws MalformedURLException {} - + + public SparqlQueryResultDataSourceTest() throws MalformedURLException { + } + @Test public void testToStringSimpleConstructor() throws MalformedURLException { final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, "b,a", @@ -65,17 +63,15 @@ public void testToStringList() throws MalformedURLException { } @Test(expected = IllegalArgumentException.class) - public void testEmptyQueryBodyList() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - + public void testEmptyQueryBodyList() throws IOException { + final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("a"))); new SparqlQueryResultDataSource(endpoint, queryVariables, StringUtils.SPACE); } @Test(expected = IllegalArgumentException.class) - public void testEmptyQueryBody() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testEmptyQueryBody() throws IOException { new SparqlQueryResultDataSource(endpoint, "a", StringUtils.SPACE); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java index fb0b13a35..7be7634e2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java @@ -30,9 +30,6 @@ import java.util.Set; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -42,8 +39,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import karmaresearch.vlog.EDBConfigurationException; - public class VLogReasonerBasics { final String constantNameC = "c"; @@ -61,8 +56,7 @@ public class VLogReasonerBasics { final Rule ruleCxBx = Expressions.makeRule(atomCx, atomBx); @Test - public void testCloseRepeatedly() - throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { + public void testCloseRepeatedly() throws IOException { try (final VLogReasoner reasoner = new 
VLogReasoner(new KnowledgeBase())) { reasoner.close(); } @@ -75,8 +69,7 @@ public void testCloseRepeatedly() } @Test - public void testLoadRules() - throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { + public void testLoadRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(ruleBxAx, ruleCxBx, ruleBxAx); @@ -86,8 +79,7 @@ public void testLoadRules() } @Test - public void testSimpleInference() throws EDBConfigurationException, IOException, ReasonerStateException, - EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testSimpleInference() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(ruleBxAx, ruleCxBx, factAc, factAd); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java index 18d1b81f6..b26d50f03 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -31,9 +31,6 @@ import java.util.Set; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -75,8 +72,7 @@ public VLogReasonerCombinedInputs() throws IOException { } @Test - public void samePredicateSourceFactRule() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateSourceFactRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(qFromCsv, factQc, factPd, rulePimpliesQ); @@ -90,8 +86,7 @@ public void samePredicateSourceFactRule() } @Test - public void samePredicateFactSourceRule() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateFactSourceRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(factQc, factPd, qFromCsv, rulePimpliesQ); @@ -105,8 +100,7 @@ public void samePredicateFactSourceRule() } @Test - public void samePredicateRuleFactSource() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateRuleFactSource() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(rulePimpliesQ, factQc, factPd, qFromCsv); @@ -120,8 +114,7 @@ public void samePredicateRuleFactSource() } @Test - public void samePredicateSourceSource() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateSourceSource() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(qFromCsv, qCDFromCsv); @@ -135,8 +128,7 @@ public void samePredicateSourceSource() } @Test - public void samePredicateSourceFactFact() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + 
public void samePredicateSourceFactFact() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(qFromCsv, factQc, factQd); @@ -150,8 +142,7 @@ public void samePredicateSourceFactFact() } @Test - public void samePredicateFactsRule() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateFactsRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(factPd, factQc, factQc1, factQc2, rulePimpliesQ); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java index 36555fda1..b9e5580e6 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -34,9 +34,7 @@ import org.junit.Ignore; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; @@ -44,8 +42,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import karmaresearch.vlog.EDBConfigurationException; - public class VLogReasonerCsvInput { private static final Predicate unaryPredicate1 = Expressions.makePredicate("p", 1); @@ -59,8 +55,7 @@ public class VLogReasonerCsvInput { // FIXME: test ignored because of a bug in VLog. Remove the @Ignore annotation // after bug is fixed.
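The simplified signatures in these test diffs reflect that loading, reasoning and querying now only surface IOException. A minimal sketch of the load-reason-query pattern the tests exercise, using only API names that appear in the surrounding diffs; the predicate names p and q and the constants c and d are illustrative, and import locations are assumed to match the vlog4j modules these patches touch:

```
import java.io.IOException;
import java.util.Arrays;

import org.semanticweb.vlog4j.core.model.api.Fact;
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class LoadReasonQuerySketch {

	public static void main(final String[] args) throws IOException {
		// Rule q(x) :- p(x), plus two facts p(c) and p(d).
		final Rule rulePimpliesQ = Expressions.makeRule(
				Expressions.makePositiveLiteral("q", Expressions.makeVariable("x")),
				Expressions.makePositiveLiteral("p", Expressions.makeVariable("x")));
		final Fact factPc = Expressions.makeFact("p", Arrays.asList(Expressions.makeConstant("c")));
		final Fact factPd = Expressions.makeFact("p", Arrays.asList(Expressions.makeConstant("d")));

		final KnowledgeBase kb = new KnowledgeBase();
		kb.addStatements(rulePimpliesQ, factPc, factPd);

		// Since these patches, only IOException needs to be handled here.
		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
			reasoner.load();
			reasoner.reason();

			// Query q(?x); expected answers are c and d.
			final PositiveLiteral query = Expressions.makePositiveLiteral("q", Expressions.makeVariable("x"));
			final QueryResultIterator answers = reasoner.answerQuery(query, true);
			while (answers.hasNext()) {
				System.out.println(answers.next());
			}
		}
	}
}
```

The same pattern applies when facts come from a CsvFileDataSource registered via a DataSourceDeclarationImpl instead of in-memory Fact objects; only the statements added to the KnowledgeBase change.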
@Test - public void testLoadEmptyCsvFile() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadEmptyCsvFile() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(unaryPredicate1, makeVariable("x")); FileDataSourceTestUtils.testLoadEmptyFile(unaryPredicate1, queryAtom, @@ -70,17 +65,14 @@ public void testLoadEmptyCsvFile() } @Test - public void testLoadUnaryFactsFromCsvFile() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testLoadUnaryFactsFromCsvFile() throws IOException { testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv"))); testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + ".csv.gz"))); } - private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) - throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, - IncompatiblePredicateArityException { + private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource)); kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate2, fileDataSource)); @@ -110,8 +102,7 @@ private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource file * @throws IncompatiblePredicateArityException */ @Test(expected = IOException.class) - public void testLoadNonexistingCsvFile() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadNonexistingCsvFile() throws IOException { final File nonexistingFile = new File("nonexistingFile.csv"); assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new CsvFileDataSource(nonexistingFile); @@ -124,8 +115,7 @@ public void testLoadNonexistingCsvFile() } @Test(expected = IncompatiblePredicateArityException.class) - public void testLoadCsvFileWrongArity() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadCsvFileWrongArity() throws IOException { final FileDataSource fileDataSource = new CsvFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv")); final KnowledgeBase kb = new KnowledgeBase(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java index e116aefda..34531cc3b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java @@ -28,9 +28,6 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import 
org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -41,8 +38,7 @@ public class VLogReasonerCsvOutput { @Test - public void testEDBQuerySameConstantSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOException { final String predicate = "p"; final Constant constantC = Expressions.makeConstant("c"); final Constant constantD = Expressions.makeConstant("d"); @@ -90,8 +86,7 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() } @Test(expected = IllegalArgumentException.class) - public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); @@ -106,8 +101,7 @@ public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() } @Test(expected = IllegalArgumentException.class) - public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); @@ -123,8 +117,7 @@ public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() } @Test(expected = IllegalArgumentException.class) - public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); @@ -141,8 +134,7 @@ public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() } @Test(expected = IllegalArgumentException.class) - public void testExportQueryEmptyKnowledgeBaseAfterReasoningExcludeBlanks() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testExportQueryEmptyKnowledgeBaseAfterReasoningExcludeBlanks() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java index ebd8be56c..801556108 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java @@ -34,9 +34,6 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import 
org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.Fact; @@ -49,8 +46,7 @@ public class VLogReasonerNegation { @Test(expected = RuntimeException.class) - public void testNotStratifiable() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, ReasonerStateException, IOException { + public void testNotStratifiable() throws IOException { final Variable x = makeVariable("x"); final Variable y = makeVariable("y"); @@ -71,8 +67,7 @@ public void testNotStratifiable() } @Test - public void testStratifiable() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, ReasonerStateException, IOException { + public void testStratifiable() throws IOException { final Variable x = makeVariable("x"); final Variable y = makeVariable("y"); @@ -107,8 +102,7 @@ public void testStratifiable() } @Test - public void testInputNegation() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, ReasonerStateException, IOException { + public void testInputNegation() throws IOException { final Variable x = makeVariable("x"); final Variable y = makeVariable("y"); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java index 264237ebb..3b72ab8cd 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -35,9 +35,6 @@ import org.junit.Ignore; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; @@ -45,8 +42,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import karmaresearch.vlog.EDBConfigurationException; - public class VLogReasonerRdfInput { private static final Predicate ternaryPredicate = Expressions.makePredicate("triple", 3); @@ -61,40 +56,36 @@ public class VLogReasonerRdfInput { makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); @Ignore - //TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation after VLog bug is fixed. + // TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation + // after VLog bug is fixed. @Test - public void testLoadEmptyRdfFile() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadEmptyRdfFile() throws IOException { FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom, new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt"))); } @Ignore - //TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation after VLog bug is fixed. 
+ // TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation + // after VLog bug is fixed. @Test - public void testLoadEmptyRdfFileGz() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadEmptyRdfFileGz() throws IOException { FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom, new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt.gz"))); } @Test - public void testLoadTernaryFactsFromRdfFile() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testLoadTernaryFactsFromRdfFile() throws IOException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt"))); } @Test - public void testLoadTernaryFactsFromRdfFileGz() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testLoadTernaryFactsFromRdfFileGz() throws IOException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedNtFileRoot + ".nt.gz"))); } - public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fileDataSource) - throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, - IncompatiblePredicateArityException { + public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fileDataSource) throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); @@ -109,8 +100,7 @@ public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fil } @Test(expected = IOException.class) - public void testLoadNonexistingRdfFile() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadNonexistingRdfFile() throws IOException { final File nonexistingFile = new File("nonexistingFile.nt"); assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile); @@ -123,8 +113,7 @@ public void testLoadNonexistingRdfFile() } @Test - public void testLoadRdfInvalidFormat() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadRdfInvalidFormat() throws IOException { final FileDataSource fileDataSource = new RdfFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt")); final KnowledgeBase kb = new KnowledgeBase(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java index b5a03c7e8..2922c48e4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -30,9 +30,7 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import 
org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Variable; @@ -53,8 +51,7 @@ public class VLogReasonerSparqlInput { */ @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testSimpleSparqlQuery() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testSimpleSparqlQuery() throws IOException { final URL endpoint = new URL("http://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); @@ -79,8 +76,7 @@ public void testSimpleSparqlQuery() @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testSimpleSparqlQueryHttps() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testSimpleSparqlQueryHttps() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); @@ -114,8 +110,7 @@ public void testSimpleSparqlQueryHttps() */ @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testSimpleSparqlQuery2() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testSimpleSparqlQuery2() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); @@ -138,8 +133,7 @@ public void testSimpleSparqlQuery2() @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test(expected = RuntimeException.class) - public void testConjunctiveQueryNewLineCharacterInQueryBody() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testConjunctiveQueryNewLineCharacterInQueryBody() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("a"), Expressions.makeVariable("c"))); @@ -159,8 +153,7 @@ public void testConjunctiveQueryNewLineCharacterInQueryBody() @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testConjunctiveQuery() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testConjunctiveQuery() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("a"), Expressions.makeVariable("c"))); @@ -182,8 +175,7 @@ public void testConjunctiveQuery() } @Test(expected = IncompatiblePredicateArityException.class) - public void testDataSourcePredicateDoesNotMatchSparqlQueryTerms() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void 
testDataSourcePredicateDoesNotMatchSparqlQueryTerms() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index c20ff05c0..d72cc644a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -24,7 +24,6 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; @@ -87,8 +86,6 @@ public static void main(final String[] args) throws IOException { } System.out.println("Done."); - } catch (final VLog4jException e) { - System.out.println("The reasoner encountered a problem: " + e.getMessage()); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 2d14c2438..281f48a32 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -25,7 +25,6 @@ import java.util.Arrays; import java.util.List; -import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; @@ -90,9 +89,6 @@ public static void main(final String[] args) throws IOException { } System.out.println("\nDone."); - } catch (final VLog4jException e) { - System.out.println("The reasoner encountered a problem:" + e.getMessage()); - } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index b3750e0bf..fea0dbe82 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -22,7 +22,6 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; @@ -92,8 +91,6 @@ public static void main(final String[] args) throws IOException { ExamplesUtils.printOutQueryAnswers("inEuropeOutsideGermany(?Org)", reasoner); System.out.println("Done."); - } catch (final VLog4jException e) { - System.out.println("Error: " + e.getMessage()); } } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 371fcc530..7af9eb473 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -22,9 +22,6 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -54,8 +51,7 @@ */ public class AddDataFromCsvFile { - public static void main(final String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, - IncompatiblePredicateArityException, ParsingException { + public static void main(final String[] args) throws IOException, ParsingException { ExamplesUtils.configureLogging(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 48fda4f86..dd62026ce 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -22,9 +22,6 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -61,8 +58,7 @@ */ public class AddDataFromRdfFile { - public static void main(final String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, - IncompatiblePredicateArityException, ParsingException { + public static void main(final String[] args) throws IOException, ParsingException { ExamplesUtils.configureLogging(); /* 1. Prepare rules and create some related vocabulary objects used later. 
*/ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 180931863..21b403dcb 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -26,9 +26,6 @@ import java.util.LinkedHashSet; import java.util.List; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -77,8 +74,7 @@ public class AddDataFromSparqlQueryResults { */ private static final String WIKIDATA_FATHER_PROPERTY = "wdt:P22"; - public static void main(final String[] args) - throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + public static void main(final String[] args) throws IOException { ExamplesUtils.configureLogging(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java index 247b2007e..3ca012638 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java @@ -34,9 +34,6 @@ import org.eclipse.jdt.annotation.NonNull; import org.eclipse.jdt.annotation.Nullable; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.implementation.Expressions; @@ -95,8 +92,7 @@ public class ConfigureReasonerLogging { /* A(c,d) */ private static Fact fact = Expressions.makeFact("A_EDB", Arrays.asList(makeConstant("c"), makeConstant("d"))); - public static void main(final String[] args) - throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public static void main(final String[] args) throws IOException { try (final Reasoner reasoner = Reasoner.getInstance()) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index d07e4d6c5..256d211b5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -22,9 +22,6 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import 
org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -45,8 +42,7 @@ */ public class SkolemVsRestrictedChaseTermination { - public static void main(final String[] args) throws ReasonerStateException, EdbIdbSeparationException, - IncompatiblePredicateArityException, IOException, ParsingException { + public static void main(final String[] args) throws IOException, ParsingException { ExamplesUtils.configureLogging(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java index dc770022f..f429ff4a0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java @@ -28,9 +28,6 @@ import java.util.List; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; @@ -61,8 +58,7 @@ */ public class AddDataFromDlgpFile { - public static void main(final String[] args) - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public static void main(final String[] args) throws IOException { final List graalAtoms = new ArrayList<>(); final List graalRules = new ArrayList<>(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index 4e9d31111..7d595dfad 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -24,9 +24,6 @@ import java.util.ArrayList; import java.util.List; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -62,8 +59,7 @@ */ public class AddDataFromGraal { - public static void main(final String[] args) - throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + public static void main(final String[] args) throws IOException { /* * 1. 
Instantiating rules */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 5c336ecd4..f203d401f 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -27,9 +27,6 @@ import java.io.IOException; import java.net.URL; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -62,8 +59,7 @@ */ public class DoidExampleGraal { - public static void main(final String[] args) - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public static void main(final String[] args) throws IOException { ExamplesUtils.configureLogging(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 7beb9f6fb..3c3834942 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -29,9 +29,6 @@ import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -55,8 +52,7 @@ */ public class OwlOntologyToRulesAndFacts { - public static void main(final String[] args) throws OWLOntologyCreationException, ReasonerStateException, - EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + public static void main(final String[] args) throws OWLOntologyCreationException, IOException { /* Bike ontology is loaded from a Bike file using OWL API */ final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index 0d13eefe7..d5a81a085 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -37,9 +37,6 @@ import org.openrdf.rio.RDFParser; import org.openrdf.rio.Rio; import org.openrdf.rio.helpers.StatementCollector; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import 
org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -64,8 +61,8 @@ */ public class AddDataFromRdfModel { - public static void main(final String[] args) throws IOException, RDFParseException, RDFHandlerException, - URISyntaxException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public static void main(final String[] args) + throws IOException, RDFParseException, RDFHandlerException, URISyntaxException { ExamplesUtils.configureLogging(); diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index 85c7f24b3..ea3dd7a46 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -40,9 +40,6 @@ import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -66,8 +63,7 @@ public class TestReasonOverRdfFacts { private static final Variable object = makeVariable("o"); @Test - public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandlerException, IOException, - ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "exampleFacts.ttl"), RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToFacts(model); @@ -86,8 +82,7 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl } @Test - public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandlerException, IOException, - ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "exampleFacts.ttl"), RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToFacts(model); @@ -107,8 +102,7 @@ public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandle } } - private Set> getQueryResults(final Reasoner reasoner, final PositiveLiteral query) - throws ReasonerStateException { + private Set> getQueryResults(final Reasoner reasoner, final PositiveLiteral query) { final QueryResultIterator queryResultIterator = reasoner.answerQuery(query, true); final Set> queryResults = new HashSet<>(); From cc1d2ae18ada7ba4082a8ab5288b3fe3cf4e457e Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 10:10:28 +0200 Subject: [PATCH 0155/1003] remove @Nullable annotation --- .../main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java | 1 - 1 file changed, 1 deletion(-) diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index a1c1cc72b..a47b50349 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -141,7 +141,6 @@ public static Reasoner getInstance() { * @return if not {@code null}, number of seconds after which the reasoning will * be interrupted, if it has not reached completion. */ - @Nullable Integer getReasoningTimeout(); /** From 66788a785cfac0bfb40041bf8d5c7b5ca67ea094 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 11:02:31 +0200 Subject: [PATCH 0156/1003] test reset reasoning timeout to null --- .../vlog4j/core/reasoner/Reasoner.java | 1 - .../core/reasoner/ReasonerTimeoutTest.java | 60 +++++++++++++------ 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index a47b50349..ff4654864 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -2,7 +2,6 @@ import java.io.IOException; -import org.eclipse.jdt.annotation.Nullable; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index d56aa3380..f93ca6b10 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; /*- @@ -75,12 +77,23 @@ public class ReasonerTimeoutTest { private final static KnowledgeBase kb = new KnowledgeBase(); /** - * The timeout after which reasoning should be completed. One second is added to - * account for setup and tear down of reasoning resources. + * The timeout after which reasoning should be completed. */ @org.junit.Rule - public Timeout globalTimeout = Timeout.seconds(timeout + 1); - + public Timeout globalTimeout = Timeout.seconds(timeout * 5); + + private final static Predicate infinite_EDB = makePredicate("infinite_EDB", 2); + private final static Predicate infinite_IDB = makePredicate("infinite_IDB", 2); + private final static Variable x = makeVariable("x"); + private final static Variable y = makeVariable("y"); + + private final static PositiveLiteral infinite_IDB_xy = makePositiveLiteral(infinite_IDB, x, y); + private final static PositiveLiteral infinite_EDB_xy = makePositiveLiteral(infinite_EDB, x, y); + private final static Variable z = makeVariable("z"); + + private final static PositiveLiteral infinite_IDB_yz = makePositiveLiteral(infinite_IDB, y, z); + private final static Rule infinite_rule = makeRule(infinite_IDB_yz, infinite_IDB_xy); + /** * This method provides the {@link #facts} and {@link #rules} to be used in all * test runs. 
To test if the timeout works as expected, a small set of facts and @@ -90,24 +103,13 @@ public class ReasonerTimeoutTest { */ @BeforeClass public static void setUpBeforeClass() { - final Predicate infinite_EDB = makePredicate("infinite_EDB", 2); - final Predicate infinite_IDB = makePredicate("infinite_IDB", 2); facts.add(makeFact(infinite_EDB, Arrays.asList(makeConstant("A"), makeConstant("B")))); - final Variable x = makeVariable("x"); - final Variable y = makeVariable("y"); - - final PositiveLiteral infinite_IDB_xy = makePositiveLiteral(infinite_IDB, x, y); - final PositiveLiteral infinite_EDB_xy = makePositiveLiteral(infinite_EDB, x, y); final Rule import_rule = makeRule(infinite_IDB_xy, infinite_EDB_xy); rules.add(import_rule); - final Variable z = makeVariable("z"); - - final PositiveLiteral infinite_IDB_yz = makePositiveLiteral(infinite_IDB, y, z); - final Rule infinite_rule = makeRule(infinite_IDB_yz, infinite_IDB_xy); rules.add(infinite_rule); kb.addStatements(rules); @@ -126,7 +128,7 @@ public void skolem() throws IOException { this.reasoner.load(); - this.reasoner.reason(); + assertFalse(this.reasoner.reason()); } @Test @@ -136,7 +138,7 @@ public void restricted() throws IOException { this.reasoner.load(); - this.reasoner.reason(); + assertFalse(this.reasoner.reason()); } @Test @@ -147,7 +149,7 @@ public void skolemAfterLoad() throws IOException { this.reasoner.setReasoningTimeout(timeout); - this.reasoner.reason(); + assertFalse(this.reasoner.reason()); } @Test @@ -158,7 +160,27 @@ public void restrictedAfterLoad() throws IOException { this.reasoner.setReasoningTimeout(timeout); - this.reasoner.reason(); + assertFalse(this.reasoner.reason()); + } + + @Test + public void resetReasoningTimeoutToNull() throws IOException { + this.reasoner.setReasoningTimeout(timeout); + + this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); + this.reasoner.load(); + assertFalse(this.reasoner.reason()); + + this.reasoner.resetReasoner(); + + final PositiveLiteral blocking_IDB_yx = makePositiveLiteral(infinite_IDB, y, x); + final Rule blockingRule = makeRule(blocking_IDB_yx, infinite_IDB_xy); + kb.addStatement(blockingRule); + + + this.reasoner.setReasoningTimeout(null); + this.reasoner.load(); + assertTrue(this.reasoner.reason()); } @After From 203b883b5df373f83080eb4838e89a7cfaeb2f78 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 11:15:24 +0200 Subject: [PATCH 0157/1003] renamed reasoner states --- .../vlog4j/core/reasoner/ReasonerState.java | 19 ++++---- .../reasoner/implementation/VLogReasoner.java | 44 +++++++++---------- 2 files changed, 32 insertions(+), 31 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java index 4dbf79176..121b6b4d8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java @@ -30,29 +30,30 @@ public enum ReasonerState { /** * State a Reasoner is in before method {@link Reasoner#load()} has been called. - * The Reasoner cannot reason before it has been loaded. The Reasoner can only - * be loaded once. Reasoning and querying are not allowed in this state. + * Querying is not allowed in this state. 
*/ - BEFORE_LOADING("before loading"), + KB_NOT_LOADED("knowledge base not loaded"), /** * State a Reasoner is in after method {@link Reasoner#load()} has been called, * and before method {@link Reasoner#reason()} has been called. The Reasoner can + * be queried. */ - AFTER_LOADING("loaded"), + KB_LOADED("knowledge base loaded"), /** * State a Reasoner is in after method {@link Reasoner#reason()} has been + * called. */ - AFTER_REASONING("after reasoning"), + MATERIALISED("after reasoning"), /** * State in which the knowledge base of an already loaded reasoner has been * changed. This can occur if the knowledge base has been modified after loading - * (in {@link ReasonerState#AFTER_LOADING} state), or after reasoning (in - * {@link ReasonerState#AFTER_REASONING} state). + * (in {@link ReasonerState#KB_LOADED} state), or after reasoning (in + * {@link ReasonerState#MATERIALISED} state). */ - KNOWLEDGE_BASE_CHANGED("knowledge base changed"), + KB_CHANGED("knowledge base changed"), /** * State a Reasoner is in after method {@link Reasoner#close()} has been called. * The Reasoner cannot reason again, once it reached this state. Loading and @@ -60,7 +61,7 @@ public enum ReasonerState { * adding rules, fact and fact data sources and setting the rule re-writing * strategy are not allowed in this state. */ - AFTER_CLOSING("closed"); + CLOSED("closed"); private final String name; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 56d915054..936fabe14 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -242,7 +242,7 @@ void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { final Map> directEdbFacts = new HashMap<>(); final Set rules = new HashSet<>(); - private ReasonerState reasonerState = ReasonerState.BEFORE_LOADING; + private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; private MaterialisationState materialisationState = MaterialisationState.INCOMPLETE; private LogLevel internalLogLevel = LogLevel.WARNING; @@ -334,7 +334,7 @@ public void load() throws IOException { loadFacts(); loadRules(); - this.reasonerState = ReasonerState.AFTER_LOADING; + this.reasonerState = ReasonerState.KB_LOADED; } String getDataSourceConfigurationString() { @@ -448,28 +448,28 @@ void loadRules() { @Override public boolean reason() throws IOException { switch (this.reasonerState) { - case BEFORE_LOADING: + case KB_NOT_LOADED: load(); runChase(); break; - case AFTER_LOADING: + case KB_LOADED: runChase(); break; - case KNOWLEDGE_BASE_CHANGED: - case AFTER_REASONING: + case KB_CHANGED: + case MATERIALISED: resetReasoner(); load(); runChase(); break; - case AFTER_CLOSING: + case CLOSED: throw new ReasonerStateException(this.reasonerState, "Reasoning is not allowed after closing."); } return this.reasoningCompleted; } private void runChase() { - this.reasonerState = ReasonerState.AFTER_REASONING; + this.reasonerState = ReasonerState.MATERIALISED; final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; try { @@ -496,7 +496,7 @@ private void runChase() { @Override public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) { validateNotClosed(); - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { + if (this.reasonerState == 
ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); } Validate.notNull(query, "Query atom must not be null!"); @@ -521,7 +521,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { validateNotClosed(); - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); } Validate.notNull(query, "Query atom must not be null!"); @@ -545,14 +545,14 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, public void resetReasoner() { validateNotClosed(); // TODO what should happen to the KB? - this.reasonerState = ReasonerState.BEFORE_LOADING; + this.reasonerState = ReasonerState.KB_NOT_LOADED; this.vLog.stop(); LOGGER.info("Reasoner has been reset. All inferences computed during reasoning have been discarded."); } @Override public void close() { - this.reasonerState = ReasonerState.AFTER_CLOSING; + this.reasonerState = ReasonerState.CLOSED; this.knowledgeBase.deleteListener(this); this.vLog.stop(); } @@ -599,7 +599,7 @@ public boolean isRMFA() { @Override public boolean isMFC() { validateNotClosed(); - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "checking rules acyclicity is not allowed before loading!"); } @@ -615,7 +615,7 @@ public boolean isMFC() { private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { validateNotClosed(); - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "checking rules acyclicity is not allowed before loading!"); } @@ -662,18 +662,18 @@ public void onStatementAdded(Statement statementAdded) { } private void updateReasonerToKnowledgeBaseChanged() { - if (this.reasonerState.equals(ReasonerState.AFTER_LOADING) - || this.reasonerState.equals(ReasonerState.AFTER_REASONING)) { + if (this.reasonerState.equals(ReasonerState.KB_LOADED) + || this.reasonerState.equals(ReasonerState.MATERIALISED)) { - this.reasonerState = ReasonerState.KNOWLEDGE_BASE_CHANGED; + this.reasonerState = ReasonerState.KB_CHANGED; this.materialisationState = MaterialisationState.WRONG; } } // private void updateReasonerStateToKnowledgeBaseChanged() { -// if (this.reasonerState.equals(ReasonerState.AFTER_LOADING) -// || this.reasonerState.equals(ReasonerState.AFTER_REASONING)) { -// this.reasonerState = ReasonerState.KNOWLEDGE_BASE_CHANGED; +// if (this.reasonerState.equals(ReasonerState.KB_LOADED) +// || this.reasonerState.equals(ReasonerState.MATERIALISED)) { +// this.reasonerState = ReasonerState.KB_CHANGED; // } // } @@ -689,7 +689,7 @@ private void updateReasonerToKnowledgeBaseChanged() { // } // private void updateMaterialisationStateOnStatementsAdded(boolean materialisationInvalidated) { -// if (this.reasonerState.equals(ReasonerState.KNOWLEDGE_BASE_CHANGED) && materialisationInvalidated) { +// if (this.reasonerState.equals(ReasonerState.KB_CHANGED) && materialisationInvalidated) { // this.materialisationState = MaterialisationState.WRONG; // } // } @@ -700,7 +700,7 @@ private void 
updateReasonerToKnowledgeBaseChanged() { * @throws ReasonerStateException */ void validateNotClosed() throws ReasonerStateException { - if (this.reasonerState == ReasonerState.AFTER_CLOSING) { + if (this.reasonerState == ReasonerState.CLOSED) { LOGGER.error("Invalid operation requested on a closed reasoner object."); throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner."); } From d59dd602fa7ebdeb17d6507cad41da6d342afddf Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 16:09:01 +0200 Subject: [PATCH 0158/1003] loading in several states --- .../vlog4j/core/reasoner/ReasonerState.java | 8 +-- .../reasoner/implementation/VLogReasoner.java | 56 +++++++++++++------ .../VLogReasonerCombinedInputs.java | 10 ++-- 3 files changed, 50 insertions(+), 24 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java index 121b6b4d8..4fb79e735 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java @@ -56,10 +56,10 @@ public enum ReasonerState { KB_CHANGED("knowledge base changed"), /** * State a Reasoner is in after method {@link Reasoner#close()} has been called. - * The Reasoner cannot reason again, once it reached this state. Loading and - * setting the reasoning algorithm in this state are ineffective. Reasoning, - * adding rules, fact and fact data sources and setting the rule re-writing - * strategy are not allowed in this state. + * The Reasoner cannot reason again, once it reached this state. Loading, + * reasoning, adding rules, fact and fact data sources, setting the rule + * re-writing strategy, the reasoning algorithm and the reasoning timeout. are + * not allowed in this state. */ CLOSED("closed"); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 936fabe14..f402df282 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -260,6 +260,8 @@ public VLogReasoner(KnowledgeBase knowledgeBase) { super(); this.knowledgeBase = knowledgeBase; this.knowledgeBase.addListener(this); + + setLogLevel(this.internalLogLevel); } @Override @@ -308,7 +310,24 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { @Override public void load() throws IOException { validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + loadKnowledgeBase(); + break; + case KB_LOADED: + case MATERIALISED: + // do nothing, all KB is already loaded + break; + case KB_CHANGED: + resetReasoner(); + loadKnowledgeBase(); + default: + break; + } + } + void loadKnowledgeBase() throws IOException { final LoadKbVisitor visitor = new LoadKbVisitor(); visitor.clearIndexes(); for (final Statement statement : knowledgeBase) { @@ -326,8 +345,6 @@ public void load() throws IOException { } catch (final EDBConfigurationException e) { throw new RuntimeException("Invalid data sources configuration.", e); } - // TODO: can't we set this earlier? Why here? 
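The varargs overloads added to Expressions above make fact construction more concise than the list-based variants. A short sketch for illustration; the predicate hasPart and the constants are invented for this example, and only Expressions methods shown in this diff are used:

```
import java.util.Arrays;

import org.semanticweb.vlog4j.core.model.api.Fact;
import org.semanticweb.vlog4j.core.model.api.Predicate;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;

public class MakeFactVarargsSketch {

	public static void main(final String[] args) {
		final Predicate hasPart = Expressions.makePredicate("hasPart", 2);

		// Before this patch: terms had to be wrapped in a List.
		final Fact viaList = Expressions.makeFact(hasPart,
				Arrays.asList(Expressions.makeConstant("bike"), Expressions.makeConstant("wheel")));

		// With the new overloads: terms can be passed directly, by Predicate or by predicate name.
		final Fact viaVarargs = Expressions.makeFact(hasPart,
				Expressions.makeConstant("bike"), Expressions.makeConstant("wheel"));
		final Fact viaName = Expressions.makeFact("hasPart",
				Expressions.makeConstant("bike"), Expressions.makeConstant("wheel"));

		// All three calls denote the fact hasPart(bike, wheel).
		System.out.println(viaList);
		System.out.println(viaVarargs);
		System.out.println(viaName);
	}
}
```

The list-based overloads remain available for callers that already hold a List of terms; the varargs forms mainly shorten hand-written facts in tests and examples.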
- setLogLevel(this.internalLogLevel); validateDataSourcePredicateArities(); @@ -335,6 +352,9 @@ public void load() throws IOException { loadRules(); this.reasonerState = ReasonerState.KB_LOADED; + + //TODO: if there are no rules, then materialisation state is complete + this.materialisationState = MaterialisationState.INCOMPLETE; } String getDataSourceConfigurationString() { @@ -414,11 +434,11 @@ void loadFacts() { aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); } try { - String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); - String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate)); + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); + final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate)); this.vLog.addData(vLogPredicateName, vLogPredicateTuples); if (LOGGER.isDebugEnabled()) { - for (String[] tuple : vLogPredicateTuples) { + for (final String[] tuple : vLogPredicateTuples) { LOGGER.debug( "Loaded direct fact " + vLogPredicateName + "(" + Arrays.deepToString(tuple) + ")"); } @@ -436,7 +456,7 @@ void loadRules() { try { this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); if (LOGGER.isDebugEnabled()) { - for (karmaresearch.vlog.Rule rule : vLogRuleArray) { + for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { LOGGER.debug("Loaded rule " + rule.toString()); } } @@ -447,6 +467,8 @@ void loadRules() { @Override public boolean reason() throws IOException { + validateNotClosed(); + switch (this.reasonerState) { case KB_NOT_LOADED: load(); @@ -455,16 +477,18 @@ public boolean reason() throws IOException { case KB_LOADED: runChase(); break; - case KB_CHANGED: - case MATERIALISED: resetReasoner(); load(); runChase(); break; - case CLOSED: - throw new ReasonerStateException(this.reasonerState, "Reasoning is not allowed after closing."); + case MATERIALISED: + runChase(); + break; + default: + break; } + return this.reasoningCompleted; } @@ -479,18 +503,19 @@ private void runChase() { } else { this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); } - this.materialisationState = this.reasoningCompleted ? MaterialisationState.COMPLETE - : MaterialisationState.INCOMPLETE; } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final MaterializationException e) { - // FIXME: the message generate here is not guaranteed to be the correct + // FIXME: the message generated here is not guaranteed to be the correct // interpretation of the exception that is caught throw new RuntimeException( "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", e); } + + this.materialisationState = this.reasoningCompleted ? MaterialisationState.COMPLETE + : MaterialisationState.INCOMPLETE; } @Override @@ -544,7 +569,6 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, @Override public void resetReasoner() { validateNotClosed(); - // TODO what should happen to the KB? this.reasonerState = ReasonerState.KB_NOT_LOADED; this.vLog.stop(); LOGGER.info("Reasoner has been reset. 
All inferences computed during reasoning have been discarded."); @@ -607,7 +631,7 @@ public boolean isMFC() { CyclicCheckResult checkCyclic; try { checkCyclic = this.vLog.checkCyclic("MFC"); - } catch (NotStartedException e) { + } catch (final NotStartedException e) { throw new RuntimeException(e.getMessage(), e); // should be impossible } return checkCyclic.equals(CyclicCheckResult.CYCLIC); @@ -623,7 +647,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { CyclicCheckResult checkCyclic; try { checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); - } catch (NotStartedException e) { + } catch (final NotStartedException e) { throw new RuntimeException(e.getMessage(), e); // should be impossible } return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java index b26d50f03..43d54c2dc 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -27,6 +27,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -56,10 +57,11 @@ public class VLogReasonerCombinedInputs { final Fact factPd = Expressions.makeFact("p", Arrays.asList(Expressions.makeConstant("d"))); final PositiveLiteral queryQx = Expressions.makePositiveLiteral(q, Arrays.asList(Expressions.makeVariable("x"))); - final Set> resultsCC1C2D = Set.of(Collections.singletonList(Expressions.makeConstant("c")), - Collections.singletonList(Expressions.makeConstant("c1")), - Collections.singletonList(Expressions.makeConstant("c2")), - Collections.singletonList(Expressions.makeConstant("d"))); + final Set> resultsCC1C2D = new HashSet<>( + Arrays.asList(Collections.singletonList(Expressions.makeConstant("c")), + Collections.singletonList(Expressions.makeConstant("c1")), + Collections.singletonList(Expressions.makeConstant("c2")), + Collections.singletonList(Expressions.makeConstant("d")))); final DataSourceDeclaration qFromCsv; final DataSourceDeclaration qCDFromCsv; From f3d9cedfec158f14bf35856edc84c662fa432d8b Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 16:19:14 +0200 Subject: [PATCH 0159/1003] make sure default logging level is set --- .../vlog4j/core/reasoner/LoggingTest.java | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 8ea46a921..9134e43dc 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -162,6 +162,35 @@ public void testLogLevelDebug() throws IOException { } + @Test + public void testLogLevelDefault() throws IOException { + final String defaultLogFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDefault.log"; + assertFalse(new File(defaultLogFilePath).exists()); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(defaultLogFilePath); + + reasoner.load(); + reasoner.reason(); + reasoner.close(); + } + final 
int countLinesReasonLogLevelDefault = readFile(defaultLogFilePath); + + final String warningLogFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDefault.log"; + assertFalse(new File(warningLogFilePath).exists()); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(warningLogFilePath); + reasoner.setLogLevel(LogLevel.WARNING); + reasoner.load(); + reasoner.reason(); + reasoner.close(); + } + final int countLinesReasonLogLevelWarning = readFile(warningLogFilePath); + + assertTrue(countLinesReasonLogLevelDefault == countLinesReasonLogLevelWarning); + } + private int readFile(final String logFilePath) throws IOException, FileNotFoundException { int countLines = 0; assertTrue(new File(logFilePath).exists()); From 063b46573cabcfd7f705619c47b268afdfcab645 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 18:11:45 +0200 Subject: [PATCH 0160/1003] small unit test materialisation state --- .../model/implementation/Expressions.java | 27 +++++++++++ .../core/reasoner/MaterialisationState.java | 18 +++++-- .../reasoner/implementation/VLogReasoner.java | 26 ++++++---- .../implementation/ReasonerStateTest.java | 48 +++++++++++++++++-- 4 files changed, 104 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java index 7a89a56a6..54a87edf9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java @@ -113,6 +113,21 @@ public static Fact makeFact(final String predicateName, final List terms) return new FactImpl(predicate, terms); } + + /** + * Creates a {@code Fact}. + * + * @param predicateName on-blank {@link Predicate} name + * @param terms non-empty, non-null array of non-null terms + * @return a {@link Fact} with given {@code terms} and {@link Predicate} + * constructed from name given {@code predicateName} and {@code arity} + * given {@code terms} size. + */ + public static Fact makeFact(final String predicateName, Term... terms) { + final Predicate predicate = makePredicate(predicateName, terms.length); + + return new FactImpl(predicate, Arrays.asList(terms)); + } /** * Creates a {@code Fact}. @@ -125,6 +140,18 @@ public static Fact makeFact(final String predicateName, final List terms) public static Fact makeFact(final Predicate predicate, final List terms) { return new FactImpl(predicate, terms); } + + /** + * Creates a {@code Fact}. + * + * @param predicate a non-null {@link Predicate} + * @param terms non-empty, non-null array of non-null terms. Array size must + * be the same as the given {@code predicate} arity. + * @return a {@link Fact} corresponding to the input. + */ + public static Fact makeFact(final Predicate predicate, final Term... terms) { + return new FactImpl(predicate, Arrays.asList(terms)); + } /** * Creates a {@code PositiveLiteral}. 
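The new varargs overloads of makeFact mainly avoid the Arrays.asList(...) wrapping when facts are written directly in code or tests. A small sketch of the difference, with invented class and variable names; only the two varargs methods are new, the list-based factory methods are unchanged.

```
import java.util.Arrays;

import org.semanticweb.vlog4j.core.model.api.Fact;
import org.semanticweb.vlog4j.core.model.api.Predicate;
import org.semanticweb.vlog4j.core.model.api.Term;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;

public class MakeFactSketch {
    public static void main(final String[] args) {
        final Term c = Expressions.makeConstant("c");
        final Predicate p = Expressions.makePredicate("p", 1);

        // list-based factory method, as before
        final Fact f1 = Expressions.makeFact(p, Arrays.asList(c));

        // new varargs overloads introduced in this patch
        final Fact f2 = Expressions.makeFact(p, c);
        final Fact f3 = Expressions.makeFact("p", c);

        System.out.println(f1 + " " + f2 + " " + f3);
    }
}
```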
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java index fed24a7b9..3e405eae8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java @@ -29,23 +29,33 @@ */ public enum MaterialisationState { - //TODO should we have different states for incomplete due to halting, vs incomplete due to adding facts for non-negated rules? /** * Reasoning has not completed. Query answering yields sound, but possibly * incomplete answers. */ - INCOMPLETE, + INCOMPLETE("incomplete"), /** * Query answering may give incorrect answers. Re-materialisation * ({@link Reasoner#reason()}) is required, in order to obtain correct results. */ - WRONG, + WRONG("wrong"), /** * Reasoning over current knowledge base is complete, and query answering yields * sound and complete results. */ - COMPLETE + COMPLETE("complete"); + + private final String name; + + private MaterialisationState(String name) { + this.name = name; + } + + @Override + public String toString() { + return name; + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index f402df282..7a7b2011c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -260,7 +260,7 @@ public VLogReasoner(KnowledgeBase knowledgeBase) { super(); this.knowledgeBase = knowledgeBase; this.knowledgeBase.addListener(this); - + setLogLevel(this.internalLogLevel); } @@ -310,7 +310,7 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { @Override public void load() throws IOException { validateNotClosed(); - + switch (this.reasonerState) { case KB_NOT_LOADED: loadKnowledgeBase(); @@ -352,8 +352,8 @@ void loadKnowledgeBase() throws IOException { loadRules(); this.reasonerState = ReasonerState.KB_LOADED; - - //TODO: if there are no rules, then materialisation state is complete + + // TODO: if there are no rules, then materialisation state is complete this.materialisationState = MaterialisationState.INCOMPLETE; } @@ -435,7 +435,8 @@ void loadFacts() { } try { final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); - final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate)); + final String[][] vLogPredicateTuples = ModelToVLogConverter + .toVLogFactTuples(directEdbFacts.get(predicate)); this.vLog.addData(vLogPredicateName, vLogPredicateTuples); if (LOGGER.isDebugEnabled()) { for (final String[] tuple : vLogPredicateTuples) { @@ -468,7 +469,7 @@ void loadRules() { @Override public boolean reason() throws IOException { validateNotClosed(); - + switch (this.reasonerState) { case KB_NOT_LOADED: load(); @@ -488,7 +489,7 @@ public boolean reason() throws IOException { default: break; } - + return this.reasoningCompleted; } @@ -513,7 +514,7 @@ private void runChase() { "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", e); } - + this.materialisationState = this.reasoningCompleted ? 
MaterialisationState.COMPLETE : MaterialisationState.INCOMPLETE; } @@ -539,6 +540,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); } + logWarningOnMaterialisationState(); return new QueryResultIterator(stringQueryResultIterator, this.materialisationState); } @@ -563,9 +565,17 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, throw new IllegalArgumentException(MessageFormat.format( "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); } + + logWarningOnMaterialisationState(); return this.materialisationState; } + private void logWarningOnMaterialisationState() { + if (this.materialisationState != MaterialisationState.COMPLETE) { + LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.materialisationState); + } + } + @Override public void resetReasoner() { validateNotClosed(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 208fcd4c0..4db277b56 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -46,6 +47,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; @@ -55,14 +57,14 @@ public class ReasonerStateTest { private static final Predicate q = Expressions.makePredicate("q", 1); private static final Variable x = Expressions.makeVariable("x"); private static final Constant c = Expressions.makeConstant("c"); - // private static final Constant d = Expressions.makeConstant("d"); + private static final Constant d = Expressions.makeConstant("d"); private static final PositiveLiteral exampleQueryAtom = Expressions.makePositiveLiteral("q", x); private static final PositiveLiteral ruleHeadQx = Expressions.makePositiveLiteral(q, x); private static final PositiveLiteral ruleBodyPx = Expressions.makePositiveLiteral(p, x); private static final Rule ruleQxPx = Expressions.makeRule(ruleHeadQx, ruleBodyPx); - private static final Fact factPc = Expressions.makeFact(p, Arrays.asList(c)); - // private static final Atom factPd = Expressions.makeAtom(q, d); + private static final Fact factPc = Expressions.makeFact(p, c); + private static final Fact factPd = Expressions.makeFact(p, d); @Test(expected = NullPointerException.class) public void testSetAlgorithm() { @@ -78,6 +80,46 @@ public void testSetReasoningTimeout() { } } + @Test + public void testAddFactsAndQuery() throws IOException { + try (final Reasoner reasoner = Reasoner.getInstance();) { + reasoner.getKnowledgeBase().addStatement(factPc); + reasoner.load(); + + final PositiveLiteral query = Expressions.makePositiveLiteral(p, x); + final Set> 
expectedAnswersC = new HashSet<>( + Arrays.asList(Collections.singletonList(c))); + + try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + assertEquals(MaterialisationState.INCOMPLETE, queryResult.getMaterialisationState()); + final Set> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); + + assertEquals(expectedAnswersC, queryAnswersC); + } + + + reasoner.getKnowledgeBase().addStatement(factPd); + + try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + assertEquals(expectedAnswersC, QueryResultsUtils.collectQueryResults(queryResult)); + } + + reasoner.load(); + + + try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + assertEquals(MaterialisationState.INCOMPLETE, queryResult.getMaterialisationState()); + + final Set> queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); + + final Set> expectedAnswersCD = new HashSet<>( + Arrays.asList(Collections.singletonList(c), Collections.singletonList(d))); + assertEquals(expectedAnswersCD, queryAnswersD); + } + } + } + // FIXME update test @Ignore @Test(expected = ReasonerStateException.class) From 9449d5b14541acbbfb27664210d7927053e781e8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 22 Aug 2019 22:28:32 +0200 Subject: [PATCH 0161/1003] Log main reasoning steps; fewer println's in examples --- .../reasoner/implementation/VLogReasoner.java | 18 ++++++++++++++---- .../vlog4j/examples/CountingTriangles.java | 7 +------ .../vlog4j/examples/DoidExample.java | 7 +------ .../examples/SimpleReasoningExample.java | 6 ------ .../SkolemVsRestrictedChaseTermination.java | 8 +++----- 5 files changed, 19 insertions(+), 27 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 7a7b2011c..b97f48dd5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -328,6 +328,7 @@ public void load() throws IOException { } void loadKnowledgeBase() throws IOException { + LOGGER.info("Started loading knowledge base ..."); final LoadKbVisitor visitor = new LoadKbVisitor(); visitor.clearIndexes(); for (final Statement statement : knowledgeBase) { @@ -355,6 +356,8 @@ void loadKnowledgeBase() throws IOException { // TODO: if there are no rules, then materialisation state is complete this.materialisationState = MaterialisationState.INCOMPLETE; + + LOGGER.info("Finished loading knowledge base."); } String getDataSourceConfigurationString() { @@ -494,6 +497,7 @@ public boolean reason() throws IOException { } private void runChase() { + LOGGER.info("Started materialisation of inferences ..."); this.reasonerState = ReasonerState.MATERIALISED; final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; @@ -504,7 +508,6 @@ private void runChase() { } else { this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); } - } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final MaterializationException e) { @@ -515,8 +518,13 @@ private void runChase() { e); } - this.materialisationState = this.reasoningCompleted ? 
MaterialisationState.COMPLETE - : MaterialisationState.INCOMPLETE; + if (this.reasoningCompleted) { + this.materialisationState = MaterialisationState.COMPLETE; + LOGGER.info("Completed materialisation of inferences."); + } else { + this.materialisationState = MaterialisationState.INCOMPLETE; + LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); + } } @Override @@ -572,7 +580,8 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, private void logWarningOnMaterialisationState() { if (this.materialisationState != MaterialisationState.COMPLETE) { - LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.materialisationState); + LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", + this.materialisationState); } } @@ -589,6 +598,7 @@ public void close() { this.reasonerState = ReasonerState.CLOSED; this.knowledgeBase.deleteListener(this); this.vLog.stop(); + LOGGER.info("Reasoner closed."); } @Override diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index d72cc644a..3a4fc8cc5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -58,13 +58,10 @@ public static void main(final String[] args) throws IOException { try (VLogReasoner reasoner = new VLogReasoner(kb)) { /* Initialise reasoner and compute inferences */ - System.out.print("Initialising rules and data sources ... "); reasoner.load(); - System.out.println("completed."); - System.out.print("Reasoning (including SPARQL query answering) ... "); + System.out.println("Note: Materialisation includes SPARQL query answering."); reasoner.reason(); - System.out.println("completed."); /* Execute queries */ try { @@ -84,8 +81,6 @@ public static void main(final String[] args) throws IOException { } catch (final ParsingException e) { System.out.println("Failed to parse query: " + e.getMessage()); } - - System.out.println("Done."); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 281f48a32..9a686b44c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -66,13 +66,10 @@ public static void main(final String[] args) throws IOException { reasoner.setLogLevel(LogLevel.DEBUG); /* Initialise reasoner and compute inferences */ - System.out.print("Initialising rules and data sources ... "); reasoner.load(); - System.out.println("completed."); - System.out.print("Reasoning (including SPARQL query answering) ... 
"); + System.out.println("Note: Materialisation includes SPARQL query answering."); reasoner.reason(); - System.out.println("completed."); /* Execute some queries */ final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); @@ -87,8 +84,6 @@ public static void main(final String[] args) throws IOException { System.out.println("Failed to parse query: " + e.getMessage()); } } - - System.out.println("\nDone."); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index fea0dbe82..81a3362f9 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -77,20 +77,14 @@ public static void main(final String[] args) throws IOException { } try (final Reasoner reasoner = new VLogReasoner(kb)) { - System.out.print("Loading knowledge base ... "); reasoner.load(); - System.out.println("done."); - System.out.print("Computing all inferences ... "); reasoner.reason(); - System.out.println("done.\n"); /* Execute some queries */ ExamplesUtils.printOutQueryAnswers("address(?Org, ?Street, ?ZIP, ?City)", reasoner); ExamplesUtils.printOutQueryAnswers("locatedIn(?place, europe)", reasoner); ExamplesUtils.printOutQueryAnswers("inEuropeOutsideGermany(?Org)", reasoner); - - System.out.println("Done."); } } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 256d211b5..d147a6a19 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -82,9 +82,8 @@ public static void main(final String[] args) throws IOException, ParsingExceptio */ reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); reasoner.setReasoningTimeout(1); - System.out.print("Starting Skolem Chase (a.k.a. semi-oblivious chase) with 1 second timeout ... "); + System.out.println("Starting Skolem Chase (a.k.a. semi-oblivious chase) with 1 second timeout ..."); final boolean skolemChaseFinished = reasoner.reason(); - System.out.println("done."); /* Verify that the Skolem Chase did not terminate before timeout. */ System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? " + skolemChaseFinished); @@ -101,7 +100,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio * We reset the reasoner and apply the Restricted Chase on the same set of rules * and facts */ - System.out.println("\nReseting reasoner; discarding facts generated during reasoning."); + System.out.println(); reasoner.resetReasoner(); reasoner.load(); @@ -119,9 +118,8 @@ public static void main(final String[] args) throws IOException, ParsingExceptio reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); reasoner.setReasoningTimeout(null); final long restrictedChaseStartTime = System.currentTimeMillis(); - System.out.print("Starting Restricted Chase (a.k.a. Standard Chase) without any timeout ... "); + System.out.println("Starting Restricted Chase (a.k.a. Standard Chase) without any timeout ... 
"); reasoner.reason(); - System.out.println("done."); /* The Restricted Chase terminates: */ final long restrictedChaseDuration = System.currentTimeMillis() - restrictedChaseStartTime; From 45115be91a2dfb9c5792a10cc4a7fe9f9a6fb430 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 23 Aug 2019 11:50:02 +0200 Subject: [PATCH 0162/1003] More efficient conversion of Facts to VLog --- .../implementation/ModelToVLogConverter.java | 11 +++++------ .../implementation/TermToVLogConverter.java | 15 ++++++++++----- .../reasoner/implementation/VLogReasoner.java | 3 +-- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java index 7ca9246ae..a83e6132c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java @@ -26,7 +26,6 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; @@ -66,21 +65,21 @@ static karmaresearch.vlog.Term[] toVLogTermArray(final List terms) { static String[][] toVLogFactTuples(final Collection facts) { final String[][] tuples = new String[facts.size()][]; int i = 0; - for (final PositiveLiteral atom : facts) { - final String[] vLogFactTuple = ModelToVLogConverter.toVLogFactTuple(atom); + for (final Fact fact : facts) { + final String[] vLogFactTuple = ModelToVLogConverter.toVLogFactTuple(fact); tuples[i] = vLogFactTuple; i++; } return tuples; } - static String[] toVLogFactTuple(final PositiveLiteral fact) { + static String[] toVLogFactTuple(final Fact fact) { final List terms = fact.getTerms(); final String[] vLogFactTuple = new String[terms.size()]; int i = 0; for (final Term term : terms) { - final karmaresearch.vlog.Term vLogTupleTerm = toVLogTerm(term); - vLogFactTuple[i] = vLogTupleTerm.getName(); + // No checks for type of term -- only constants allowed in facts! 
+ vLogFactTuple[i] = TermToVLogConverter.getVLogNameForConstant(term.getName()); i++; } return vLogFactTuple; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java index 509b5978d..cfa887714 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java @@ -40,12 +40,17 @@ class TermToVLogConverter implements TermVisitor { */ @Override public karmaresearch.vlog.Term visit(Constant term) { - if (term.getName().startsWith("\"")) { // keep datatype literal strings unchanged - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); - } else if (term.getName().contains(":")) { // enclose IRIs with < > - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "<" + term.getName() + ">"); + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + getVLogNameForConstant(term.getName())); + } + + public static String getVLogNameForConstant(String vLog4jConstantName) { + if (vLog4jConstantName.startsWith("\"")) { // keep datatype literal strings unchanged + return vLog4jConstantName; + } else if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > + return "<" + vLog4jConstantName + ">"; } else { // keep relative IRIs unchanged - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + return vLog4jConstantName; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b97f48dd5..cb1684627 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -443,8 +443,7 @@ void loadFacts() { this.vLog.addData(vLogPredicateName, vLogPredicateTuples); if (LOGGER.isDebugEnabled()) { for (final String[] tuple : vLogPredicateTuples) { - LOGGER.debug( - "Loaded direct fact " + vLogPredicateName + "(" + Arrays.deepToString(tuple) + ")"); + LOGGER.debug("Loaded direct fact " + vLogPredicateName + Arrays.toString(tuple)); } } } catch (final EDBConfigurationException e) { From 2bbac7b8a929db97e8e38d6185de8f79a5f2831f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 23 Aug 2019 14:46:12 +0200 Subject: [PATCH 0163/1003] Efficient in-memory data source --- .../implementation/InMemoryDataSource.java | 84 +++++++++++++++++++ .../reasoner/implementation/VLogReasoner.java | 63 +++++++++++--- 2 files changed, 135 insertions(+), 12 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java new file mode 100644 index 000000000..ac9a76569 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -0,0 +1,84 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import java.util.Arrays; + +import 
org.semanticweb.vlog4j.core.model.api.DataSource; + +/** + * A {@link DataSource} for representing a large number of facts that were + * generated in Java. Rather than making {@link Fact} objects for all of them, + * the object will directly accept tuples of constant names that are internally + * stored in a form that can be passed to the reasoner directly, thereby saving + * memory and loading time. + * + * @author Markus Kroetzsch + * + */ +public class InMemoryDataSource implements DataSource { + + String[][] data; + int nextEmptyTuple = 0; + int capacity; + final int arity; + + /** + * Create a new in-memory data source for facts of the specified arity. The + * given capacity is the initial size of the space allocated. For best + * efficiency, the actual number of facts should exactly correspond to this + * capacity. + * + * @param arity the number of parameters in a fact from this source + * @param initialCapacity the planned number of facts + */ + public InMemoryDataSource(int arity, int initialCapacity) { + this.capacity = initialCapacity; + this.arity = arity; + data = new String[initialCapacity][arity]; + } + + /** + * Adds a fact to this data source. The number of constant names must agree with + * the arity of this data source. + * + * @param constantNames the string names of the constants in this fact + */ + public void addTuple(String... constantNames) { + if (constantNames.length != arity) { + throw new IllegalArgumentException("This data source holds tuples of arity " + arity + + ". Adding a tuple of size " + constantNames.length + " is not possible."); + } + if (nextEmptyTuple == capacity) { + capacity = capacity * 2; + this.data = Arrays.copyOf(data, capacity); + } + data[nextEmptyTuple] = new String[arity]; + for (int i = 0; i < arity; i++) { + data[nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstant(constantNames[i]); + } + nextEmptyTuple++; + } + + /** + * Returns the data stored in this data source, in the format expected by the + * VLog reasoner backend. + * + * @return the data + */ + public String[][] getData() { + if (nextEmptyTuple == capacity) { + return this.data; + } else { + return Arrays.copyOf(this.data, this.nextEmptyTuple); + } + } + + /** + * Returns null to indicate that this {@link DataSource} cannot be passed to + * VLog in a configuration string. 
+ */ + @Override + public String toConfigString() { + return null; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index cb1684627..471f83d32 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -340,12 +340,13 @@ void loadKnowledgeBase() throws IOException { } try { - this.vLog.start(getDataSourceConfigurationString(), false); + this.vLog.start(getDataSourcesConfigurationString(), false); } catch (final AlreadyStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final EDBConfigurationException e) { throw new RuntimeException("Invalid data sources configuration.", e); } + loadInMemoryDataSources(); validateDataSourcePredicateArities(); @@ -360,30 +361,37 @@ void loadKnowledgeBase() throws IOException { LOGGER.info("Finished loading knowledge base."); } - String getDataSourceConfigurationString() { + String getDataSourcesConfigurationString() { final StringBuilder configStringBuilder = new StringBuilder(); final Formatter formatter = new Formatter(configStringBuilder); int dataSourceIndex = 0; for (final Predicate predicate : this.edbPredicates.keySet()) { final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); - if (dataSourceDeclaration.getDataSource() != null) { - formatter.format(dataSourceDeclaration.getDataSource().toConfigString(), dataSourceIndex, - ModelToVLogConverter.toVLogPredicate(predicate)); - dataSourceIndex++; - } + dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), predicate, + dataSourceIndex, formatter); } for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); - if (dataSourceDeclaration.getDataSource() != null) { - formatter.format(dataSourceDeclaration.getDataSource().toConfigString(), dataSourceIndex, - ModelToVLogConverter.toVLogPredicate(aliasPredicate)); - dataSourceIndex++; - } + dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), aliasPredicate, + dataSourceIndex, formatter); } formatter.close(); return configStringBuilder.toString(); } + int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, int dataSourceIndex, + Formatter formatter) { + if (dataSource != null) { + String configString = dataSource.toConfigString(); + if (configString != null) { + formatter.format(dataSource.toConfigString(), dataSourceIndex, + ModelToVLogConverter.toVLogPredicate(predicate)); + return dataSourceIndex + 1; + } + } + return dataSourceIndex; + } + /** * Checks if the loaded external data sources do in fact contain data of the * correct arity. 
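Because toConfigString() returns null, an in-memory source contributes nothing to the EDB configuration string; its tuples are instead handed to VLog via addData during loading. A minimal wiring sketch follows, with invented constants and predicate name; the InMemoryGraphAnalysisExample added later in this series shows a realistic use.

```
import java.io.IOException;

import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;
import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource;

public class InMemorySourceSketch {
    public static void main(final String[] args) throws IOException {
        // binary source with an initial capacity of 1000 tuples; it grows automatically if needed
        final InMemoryDataSource edges = new InMemoryDataSource(2, 1000);
        edges.addTuple("a", "b");
        edges.addTuple("b", "c");

        try (final Reasoner reasoner = Reasoner.getInstance()) {
            // attach the source to the binary predicate "edge"
            reasoner.getKnowledgeBase()
                    .addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("edge", 2), edges));
            reasoner.load(); // tuples are passed to VLog directly, without a config string entry
        }
    }
}
```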
@@ -401,6 +409,37 @@ void validateDataSourcePredicateArities() throws IncompatiblePredicateArityExcep } } + void loadInMemoryDataSources() { + for (final Predicate predicate : this.edbPredicates.keySet()) { + final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); + loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), predicate); + } + for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { + final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); + loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), aliasPredicate); + } + } + + void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { + final InMemoryDataSource inMemoryDataSource; + if (dataSource instanceof InMemoryDataSource) { + inMemoryDataSource = (InMemoryDataSource) dataSource; + } else { + return; + } + try { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); + this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : inMemoryDataSource.getData()) { + LOGGER.debug("Loaded direct fact " + vLogPredicateName + Arrays.toString(tuple)); + } + } + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration.", e); + } + } + /** * Checks if the loaded external data for a given source does in fact contain * data of the correct arity for the given predidate. From 95d7216c704620c94d14446700d1dc742e55fcdb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 23 Aug 2019 14:46:32 +0200 Subject: [PATCH 0164/1003] Example for in-memory data source --- .../vlog4j/examples/ExamplesUtils.java | 27 +++++-- .../InMemoryGraphAnalysisExample.java | 78 +++++++++++++++++++ 2 files changed, 99 insertions(+), 6 deletions(-) create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index b6a456484..7ea5be93a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -84,10 +84,8 @@ public static void configureLogging() { * * @param queryAtom query to be answered * @param reasoner reasoner to query on - * @throws ReasonerStateException in case the reasoner has not yet been loaded. */ - public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) - throws ReasonerStateException { + public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) { System.out.println("Answers to query " + queryAtom + " :"); try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { answers.forEachRemaining(answer -> System.out.println(" - " + answer)); @@ -101,10 +99,8 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R * * @param queryAtom query to be answered * @param reasoner reasoner to query on - * @throws ReasonerStateException in case the reasoner has not yet been loaded. 
*/ - public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) - throws ReasonerStateException { + public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) { try { PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); printOutQueryAnswers(query, reasoner); @@ -113,6 +109,25 @@ public static void printOutQueryAnswers(final String queryString, final Reasoner } } + /** + * Returns the number of answers returned by {@code reasoner} to the query + * ({@code queryAtom}). + * + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + * @throws ReasonerStateException in case the reasoner has not yet been loaded. + */ + public static int getQueryAnswerCount(final String queryString, final Reasoner reasoner) { + try { + PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); + try (final QueryResultIterator answers = reasoner.answerQuery(query, true)) { + return iteratorSize(answers); + } + } catch (ParsingException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + /** * Returns the size of an iterator. * diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java new file mode 100644 index 000000000..cb8b88f6f --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -0,0 +1,78 @@ +package org.semanticweb.vlog4j.examples; + +import java.io.IOException; + +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; + +/** + * This example shows how to reason efficiently with data sets generated in + * Java. We generate a random graph with several million edges, check + * connectivity, and count triangles. + * + * Parameters can be modified to obtain graphs of different sizes and density. + * It should be noted, however, that the number of triangles in reasonably dense + * graphs tends to be huge, and it is easy to exhaust memory in this way. + * + * @author Markus Kroetzsch + * + */ +public class InMemoryGraphAnalysisExample { + + public static void main(String[] args) throws ParsingException, IOException { + ExamplesUtils.configureLogging(); + + /* 1. 
Create a simple random graph */ + System.out.println("Generating random graph ..."); + int vertexCount = 10000; + double density = 0.03; + // initialise data source for storing edges (estimate how many we'll need) + InMemoryDataSource edges = new InMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); + int edgeCount = 0; + for (int i = 1; i <= vertexCount; i++) { + for (int j = 1; j <= vertexCount; j++) { + if (Math.random() < density) { + edges.addTuple("v" + i, "v" + j); + edgeCount++; + } + } + } + // also make a unary data source to mark vertices: + InMemoryDataSource vertices = new InMemoryDataSource(1, vertexCount); + for (int i = 1; i <= vertexCount; i++) { + vertices.addTuple("v" + i); + } + System.out.println("Generated " + edgeCount + " edges in random graph of " + vertexCount + " vertices."); + + /* 2. Initialise database with random data and some rules */ + + final String rules = "" // + + "biedge(?X,?Y) :- edge(?X,?Y), edge(?Y,?X) ." // + + "connected(v1) ." // + + "connected(?X) :- connected(?Y), biedge(?Y,?X) ." // + + "unreachable(?X) :- vertex(?X), ~connected(?X) . " // + + "triangle(?X, ?Y, ?Z) :- biedge(?X,?Y), biedge(?Y, ?Z), biedge(?Z,?X) ."; + + final KnowledgeBase kb = RuleParser.parse(rules); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("vertex", 1), vertices)); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("edge", 2), edges)); + + /* 3. Use reasoner to compute some query results */ + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + + System.out.println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + + ExamplesUtils.getQueryAnswerCount("unreachable(?X)", reasoner)); + System.out.println("Number of bi-directional triangles: " + + ExamplesUtils.getQueryAnswerCount("triangle(?X,?Y,?Z)", reasoner) / 6); + } + } + +} From 673139e86970856df0e0ad19f27b0d9b948c4bc4 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 23 Aug 2019 23:11:22 +0200 Subject: [PATCH 0165/1003] new example to compare DBpedia with Wikidata --- .../examples/CompareWikidataDBpedia.java | 84 +++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java new file mode 100644 index 000000000..4eaa42e3c --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -0,0 +1,84 @@ +package org.semanticweb.vlog4j.examples; + +import java.io.IOException; + +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; + +/** + * This example shows how to integrate and compare the contents of two SPARQL + * endpoints, in this case for Wikidata and DBpedia. We are asking both sources + * for the same information (each using their terms to express it), and query + * for related English Wikipedia article URLs as a key to integrate the data + * over. 
For a fair comparison, we restrict to Wikidata entities that have a + * related English Wikipedia page (others cannot be in English DBpedia in the + * first place). + * + * The example query used asks for alumni of the University of Leipzig (one of + * the oldest European universities). + * + * @author Markus Kroetzsch + * + */ +public class CompareWikidataDBpedia { + + /** + * SPARQL pattern snippet to find an English Wikipedia page URL from a Wikidata + * entity ?result. + */ + static String sparqlGetWikiIriWikidata = "?enwikipage schema:about ?result ; " + + "schema:isPartOf . "; + /** + * SPARQL pattern snippet to find an English Wikipedia page URL from a DBpedia + * entity ?result. Some string magic is needed to replace the outdated http + * protocol used in DBpedia's Wikidata page names by the current https. + */ + static String sparqlGetWikiIriDBpedia = "?result ?enwikipageHttp . " + + "BIND( IRI(CONCAT(\"https\",SUBSTR(str(?enwikipageHttp), 5))) AS ?enwikipage)"; + + public static void main(String[] args) throws ParsingException, IOException { + ExamplesUtils.configureLogging(); + + // Wikidata pattern: P69 is "educated at"; Q154804 is "University of Leipzig" + String wikidataSparql = "?result wdt:P69 wd:Q154804 . " + sparqlGetWikiIriWikidata; + // DBpedia pattern: + String dbpediaSparql = "?result . " + + sparqlGetWikiIriDBpedia; + + // Configure the SPARQL data sources and some rules to analyse results: + String rules = "" // + + "@prefix wdqs: ." // + + "@prefix dbp: ." // + + "@source dbpResult(2) : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // + + "@source wdResult(2) : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + + "% Rules:\n" // + + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage)." // + + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage)." // + + "result(?Wikipage) :- inWd(?Wikipage)." // + + "result(?Wikipage) :- inDbp(?Wikipage)." // + + "match(?WdId,?DbpId) :- dbpResult(?DbpId,?Wikipage), wdResult(?WdId,?Wikipage)." + + "dbpOnly(?DbpId,?Wikipage) :- dbpResult(?DbpId,?Wikipage), ~inWd(?Wikipage)." + + "wdpOnly(?WdId,?Wikipage) :- wdResult(?WdId,?Wikipage), ~inDbp(?Wikipage)." 
+ ""; // + + final KnowledgeBase kb = RuleParser.parse(rules); + + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + + int resultCount = ExamplesUtils.getQueryAnswerCount("result(?X)", reasoner); + int wdCount = ExamplesUtils.getQueryAnswerCount("inWd(?X)", reasoner); + int dbpCount = ExamplesUtils.getQueryAnswerCount("inDbp(?X)", reasoner); + + System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + + " were in Wikidata and " + dbpCount + " were in DBPedia"); + + System.out.println("We focus on results found in DBpedia only (usually the smaller set)."); + ExamplesUtils.printOutQueryAnswers("dbpOnly(?X,?Y)", reasoner); + } + } + +} From 56a5ad06086066823dabca21530c32cd6b94f5a5 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 23 Aug 2019 23:15:28 +0200 Subject: [PATCH 0166/1003] nicer output --- .../semanticweb/vlog4j/examples/CompareWikidataDBpedia.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 4eaa42e3c..d716b76bf 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -60,7 +60,7 @@ public static void main(String[] args) throws ParsingException, IOException { + "result(?Wikipage) :- inWd(?Wikipage)." // + "result(?Wikipage) :- inDbp(?Wikipage)." // + "match(?WdId,?DbpId) :- dbpResult(?DbpId,?Wikipage), wdResult(?WdId,?Wikipage)." - + "dbpOnly(?DbpId,?Wikipage) :- dbpResult(?DbpId,?Wikipage), ~inWd(?Wikipage)." + + "dbpOnly(?Wikipage) :- inDbp(?Wikipage), ~inWd(?Wikipage)." + "wdpOnly(?WdId,?Wikipage) :- wdResult(?WdId,?Wikipage), ~inDbp(?Wikipage)." 
+ ""; // final KnowledgeBase kb = RuleParser.parse(rules); @@ -77,7 +77,7 @@ public static void main(String[] args) throws ParsingException, IOException { + " were in Wikidata and " + dbpCount + " were in DBPedia"); System.out.println("We focus on results found in DBpedia only (usually the smaller set)."); - ExamplesUtils.printOutQueryAnswers("dbpOnly(?X,?Y)", reasoner); + ExamplesUtils.printOutQueryAnswers("dbpOnly(?X)", reasoner); } } From 5d2ebd6ce4aa7b59793d1443a57e4f92e97ed9dd Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 25 Aug 2019 14:26:56 +0200 Subject: [PATCH 0167/1003] Add note about imperfection of mapping --- .../semanticweb/vlog4j/examples/CompareWikidataDBpedia.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index d716b76bf..460888c59 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -78,6 +78,10 @@ public static void main(String[] args) throws ParsingException, IOException { System.out.println("We focus on results found in DBpedia only (usually the smaller set)."); ExamplesUtils.printOutQueryAnswers("dbpOnly(?X)", reasoner); + + System.out.println("Note: some of these results might still be in Wikidata, due to:\n" + + "* recent Wikipedia article renamings that are not updated in DBpedia\n" + + "* failure to match Wikipedia URLs due to small differences in character encoding\n"); } } From 61c66080ee10793962951961b4b6354e7103b42d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 27 Aug 2019 10:45:39 +0200 Subject: [PATCH 0168/1003] Warn & return empty result (instead of exception) --- .../core/reasoner/QueryResultIterator.java | 32 ++++++++++ .../vlog4j/core/reasoner/Reasoner.java | 1 - .../EmptyQueryResultIterator.java | 60 +++++++++++++++++++ .../implementation/InMemoryDataSource.java | 20 +++++++ ...ator.java => VLogQueryResultIterator.java} | 7 +-- .../reasoner/implementation/VLogReasoner.java | 8 ++- .../vlog4j/core/reasoner/LoggingTest.java | 5 +- .../implementation/AddDataSourceTest.java | 1 + .../implementation/AnswerQueryTest.java | 16 +++-- .../FileDataSourceTestUtils.java | 1 + .../implementation/QueryResultsUtils.java | 1 + .../implementation/ReasonerStateTest.java | 1 + .../implementation/VLogReasonerBasics.java | 1 + .../VLogReasonerCombinedInputs.java | 1 + .../implementation/VLogReasonerCsvInput.java | 1 + .../implementation/VLogReasonerNegation.java | 1 + .../implementation/VLogReasonerRdfInput.java | 1 + .../VLogReasonerSparqlInput.java | 1 + .../examples/CompareWikidataDBpedia.java | 20 +++++++ .../vlog4j/examples/DoidExample.java | 2 +- .../vlog4j/examples/ExamplesUtils.java | 4 +- .../InMemoryGraphAnalysisExample.java | 20 +++++++ .../core/AddDataFromSparqlQueryResults.java | 2 +- .../SkolemVsRestrictedChaseTermination.java | 2 +- .../examples/graal/DoidExampleGraal.java | 2 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 2 +- .../examples/rdf/AddDataFromRdfModel.java | 2 +- .../vlog4j/rdf/TestReasonOverRdfFacts.java | 2 +- 28 files changed, 194 insertions(+), 23 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java 
rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/{QueryResultIterator.java => VLogQueryResultIterator.java} (89%) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java new file mode 100644 index 000000000..c2a7ee746 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java @@ -0,0 +1,32 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Iterator; + +import org.semanticweb.vlog4j.core.model.api.QueryResult; + +public interface QueryResultIterator extends Iterator, AutoCloseable { + + public MaterialisationState getMaterialisationState(); + + public void close(); +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index ff4654864..d8872e987 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -8,7 +8,6 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import karmaresearch.vlog.Atom; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java new file mode 100644 index 000000000..268c27371 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -0,0 +1,60 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.QueryResult; +import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; + +/** + * Iterator that represents an empty query result. + * + * @author Markus Kroetzsch + * + */ +public class EmptyQueryResultIterator implements QueryResultIterator { + + final MaterialisationState materialisationState; + + public EmptyQueryResultIterator(MaterialisationState materialisationState) { + this.materialisationState = materialisationState; + } + + @Override + public void close() { + // nothing to do + } + + @Override + public boolean hasNext() { + return false; + } + + @Override + public QueryResult next() { + return null; + } + + public MaterialisationState getMaterialisationState() { + return this.materialisationState; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index ac9a76569..0071c210c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
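With the QueryResultIterator interface and EmptyQueryResultIterator in place, answering a query over a predicate that does not occur in the knowledge base no longer throws an IllegalArgumentException; it logs a warning and returns an empty iterator. A sketch of the new behaviour, with invented predicate and fact names:

```
import java.io.IOException;
import java.util.Arrays;

import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;

public class EmptyAnswerSketch {
    public static void main(final String[] args) throws IOException {
        try (final Reasoner reasoner = Reasoner.getInstance()) {
            reasoner.getKnowledgeBase()
                    .addStatement(Expressions.makeFact("p", Arrays.asList(Expressions.makeConstant("c"))));
            reasoner.reason();

            // predicate "q" does not occur anywhere in the knowledge base
            final PositiveLiteral query = Expressions.makePositiveLiteral("q", Expressions.makeVariable("x"));

            try (final QueryResultIterator answers = reasoner.answerQuery(query, true)) {
                System.out.println(answers.hasNext());                 // false: empty result
                System.out.println(answers.getMaterialisationState()); // reported as complete
            }
        }
    }
}
```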
+ * #L% + */ + import java.util.Arrays; import org.semanticweb.vlog4j.core.model.api.DataSource; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java similarity index 89% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java index 3e0933c75..fe91caae8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java @@ -20,10 +20,9 @@ * #L% */ -import java.util.Iterator; - import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; @@ -35,14 +34,14 @@ * @author Irina Dragoste * */ -public class QueryResultIterator implements Iterator, AutoCloseable { +public class VLogQueryResultIterator implements QueryResultIterator { private final TermQueryResultIterator vLogTermQueryResultIterator; private final MaterialisationState materialisationState; // TODO add reasoningState to constructor - public QueryResultIterator(final TermQueryResultIterator termQueryResultIterator, + public VLogQueryResultIterator(final TermQueryResultIterator termQueryResultIterator, final MaterialisationState materialisationState) { this.vLogTermQueryResultIterator = termQueryResultIterator; this.materialisationState = materialisationState; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 471f83d32..1eaf10614 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; @@ -582,12 +583,13 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { - throw new IllegalArgumentException(MessageFormat.format( - "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. 
Answer must be empty."); + return new EmptyQueryResultIterator(MaterialisationState.COMPLETE); } logWarningOnMaterialisationState(); - return new QueryResultIterator(stringQueryResultIterator, this.materialisationState); + return new VLogQueryResultIterator(stringQueryResultIterator, this.materialisationState); } @Override diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 9134e43dc..dbc3c7bc7 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -1,5 +1,6 @@ package org.semanticweb.vlog4j.core.reasoner; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -177,7 +178,7 @@ public void testLogLevelDefault() throws IOException { final int countLinesReasonLogLevelDefault = readFile(defaultLogFilePath); final String warningLogFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDefault.log"; - assertFalse(new File(warningLogFilePath).exists()); + //assertFalse(new File(warningLogFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(warningLogFilePath); @@ -188,7 +189,7 @@ public void testLogLevelDefault() throws IOException { } final int countLinesReasonLogLevelWarning = readFile(warningLogFilePath); - assertTrue(countLinesReasonLogLevelDefault == countLinesReasonLogLevelWarning); + assertEquals(countLinesReasonLogLevelDefault, countLinesReasonLogLevelWarning); } private int readFile(final String logFilePath) throws IOException, FileNotFoundException { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index a53a28fa2..c3d1ec8b8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -39,6 +39,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class AddDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index 90b6fa877..fa8c39a65 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Set; @@ -45,6 +46,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import 
org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; @@ -241,7 +243,7 @@ public void queryResultWithBlanks() throws IOException { } } - @Test(expected = IllegalArgumentException.class) + @Test public void queryEmptyKnowledgeBaseBeforeReasoning() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -249,11 +251,14 @@ public void queryEmptyKnowledgeBaseBeforeReasoning() throws IOException { reasoner.load(); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); - reasoner.answerQuery(queryAtom, true); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true) ) { + final Set> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator); + assertEquals(Collections.EMPTY_SET, queryResults); + } } } - @Test(expected = IllegalArgumentException.class) + @Test public void queryEmptyKnowledgeBaseAfterReasoning() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -263,7 +268,10 @@ public void queryEmptyKnowledgeBaseAfterReasoning() throws IOException { reasoner.reason(); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); - reasoner.answerQuery(queryAtom, true); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true) ) { + final Set> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator); + assertEquals(Collections.EMPTY_SET, queryResults); + } } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java index 7898e0e4e..a2b1b8036 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -38,6 +38,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; /** diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java index dae331856..1ec594328 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; /** * Utility class with static methods for collecting the results of a query for diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 4db277b56..e03a0d4aa 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -48,6 +48,7 @@ 
import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java index 7be7634e2..e3d87cef4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java @@ -38,6 +38,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerBasics { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java index 43d54c2dc..7b11e32b1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -42,6 +42,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerCombinedInputs { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java index b9e5580e6..2bcbdfaa8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -41,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerCsvInput { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java index 801556108..1e9d3a113 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java @@ -42,6 +42,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerNegation { diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java index 3b72ab8cd..bf7d6aca2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -41,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerRdfInput { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java index 2922c48e4..4e31e2b8a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -37,6 +37,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerSparqlInput { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 460888c59..93ce773fe 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.examples; +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.IOException; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 9a686b44c..bfbd3a3d7 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -28,8 +28,8 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 7ea5be93a..929bb1832 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -33,8 +33,8 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -72,7 +72,7 @@ public static void configureLogging() { String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; consoleAppender.setLayout(new PatternLayout(pattern)); // Change to Level.ERROR for fewer messages: - consoleAppender.setThreshold(Level.INFO); + consoleAppender.setThreshold(Level.DEBUG); consoleAppender.activateOptions(); Logger.getRootLogger().addAppender(consoleAppender); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index cb8b88f6f..05d0b65ef 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.examples; +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.IOException; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 21b403dcb..40084ccdb 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -36,8 +36,8 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index d147a6a19..aef1c2df4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -25,7 +25,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index f203d401f..5d27f95e1 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -35,8 +35,8 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 3c3834942..3f14f1556 100644 --- 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -37,7 +37,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index d5a81a085..b3839f86a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -44,7 +44,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index ea3dd7a46..e1c32fc63 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -46,8 +46,8 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; public class TestReasonOverRdfFacts { From feac6b7717f0501a06d33b26cdf7f1165fd76b6a Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 11:29:34 +0200 Subject: [PATCH 0169/1003] modified unit test to remove logs from dir before --- vlog4j-core/src/test/data/logs/.keep | 0 .../vlog4j/core/reasoner/LoggingTest.java | 32 ++++++++++++++----- 2 files changed, 24 insertions(+), 8 deletions(-) delete mode 100644 vlog4j-core/src/test/data/logs/.keep diff --git a/vlog4j-core/src/test/data/logs/.keep b/vlog4j-core/src/test/data/logs/.keep deleted file mode 100644 index e69de29bb..000000000 diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index dbc3c7bc7..5e7806fed 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -32,6 +32,7 @@ import 
java.io.IOException; import java.util.Arrays; +import org.junit.BeforeClass; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; @@ -43,7 +44,7 @@ public class LoggingTest { - public static final String LOGS_FOLDER = "src/test/data/logs/"; + public static final String LOGS_DIRECTORY = "src/test/data/logs/"; private static final Variable vx = Expressions.makeVariable("x"); // p(?x) -> q(?x) @@ -60,6 +61,21 @@ public class LoggingTest { kb.addStatements(rule, factPc); } + @BeforeClass + public static void emptyLogDirectory() { + + final File logsDir = new File(LOGS_DIRECTORY); + + if (!logsDir.exists()) { + logsDir.mkdir(); + } + + final File[] listFiles = logsDir.listFiles(); + for (final File file : listFiles) { + file.delete(); + } + } + // TODO remaining tests: change log file // TODO remaining tests: test that the log level and the log files can be set // any time @@ -78,7 +94,7 @@ public void testSetLogFileNull() throws IOException { @Test public void testSetLogFileInexistent() throws IOException { - final String inexistentFilePath = LOGS_FOLDER + "a/b"; + final String inexistentFilePath = LOGS_DIRECTORY + "a/b"; try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(inexistentFilePath); @@ -101,7 +117,7 @@ public void testSetLogLevelNull() { @Test public void testSetLogFileAppendsToFile() throws IOException { - final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testSetLogFileAppendsToFile.log"; + final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testSetLogFileAppendsToFile.log"; assertFalse(new File(logFilePath).exists()); int countLinesBeforeReset = 0; @@ -126,7 +142,7 @@ public void testSetLogFileAppendsToFile() throws IOException { @Test public void testLogLevelInfo() throws IOException { - final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelInfo.log"; + final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelInfo.log"; assertFalse(new File(logFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -145,7 +161,7 @@ public void testLogLevelInfo() throws IOException { @Test public void testLogLevelDebug() throws IOException { - final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDebug.log"; + final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDebug.log"; assertFalse(new File(logFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -165,7 +181,7 @@ public void testLogLevelDebug() throws IOException { @Test public void testLogLevelDefault() throws IOException { - final String defaultLogFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDefault.log"; + final String defaultLogFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDefault.log"; assertFalse(new File(defaultLogFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -177,8 +193,8 @@ public void testLogLevelDefault() throws IOException { } final int countLinesReasonLogLevelDefault = readFile(defaultLogFilePath); - final String warningLogFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDefault.log"; - //assertFalse(new File(warningLogFilePath).exists()); + final String warningLogFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDefault.log"; + assertFalse(new File(warningLogFilePath).exists()); try 
(final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(warningLogFilePath); From 362c8e538b4a1d94845384ea12c93f9d650a1592 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 11:59:30 +0200 Subject: [PATCH 0170/1003] if only facts are loaded, and reasoner in KB_LOADED state, then materialisation status is COMPLETE --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 6 +++--- .../core/reasoner/implementation/ReasonerStateTest.java | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 1eaf10614..d8372e42a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -356,8 +356,8 @@ void loadKnowledgeBase() throws IOException { this.reasonerState = ReasonerState.KB_LOADED; - // TODO: if there are no rules, then materialisation state is complete - this.materialisationState = MaterialisationState.INCOMPLETE; + // if there are no rules, then materialisation state is complete + this.materialisationState = rules.isEmpty()? MaterialisationState.COMPLETE: MaterialisationState.INCOMPLETE; LOGGER.info("Finished loading knowledge base."); } @@ -383,7 +383,7 @@ String getDataSourcesConfigurationString() { int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, int dataSourceIndex, Formatter formatter) { if (dataSource != null) { - String configString = dataSource.toConfigString(); + final String configString = dataSource.toConfigString(); if (configString != null) { formatter.format(dataSource.toConfigString(), dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index e03a0d4aa..a8eb3c923 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -92,7 +92,7 @@ public void testAddFactsAndQuery() throws IOException { Arrays.asList(Collections.singletonList(c))); try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ - assertEquals(MaterialisationState.INCOMPLETE, queryResult.getMaterialisationState()); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); final Set> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); assertEquals(expectedAnswersC, queryAnswersC); @@ -108,9 +108,8 @@ public void testAddFactsAndQuery() throws IOException { reasoner.load(); - try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ - assertEquals(MaterialisationState.INCOMPLETE, queryResult.getMaterialisationState()); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); final Set> queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); From 2af6c186a7e8ff815e3d03ababb27c2f8569adec Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 12:06:58 +0200 Subject: [PATCH 0171/1003] make sure KB statements are unmodifyiable --- 
.../semanticweb/vlog4j/core/reasoner/KnowledgeBase.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 64c6a955d..6517a97ca 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -286,17 +286,18 @@ void addFact(Fact fact) { * Returns all {@link Statement}s of this knowledge base. * * The result can be iterated over and will return statements in the original - * order. + * order. The collection is read-only and cannot be modified to add or delete + * statements. * * @return a collection of statements */ public Collection getStatements() { - return this.statements; + return Collections.unmodifiableCollection(this.statements); } @Override public Iterator iterator() { - return this.statements.iterator(); + return Collections.unmodifiableCollection(this.statements).iterator(); } } From bd97a30c6badf1dee115e2741d36f1702b7fac89 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 12:18:48 +0200 Subject: [PATCH 0172/1003] made fields private in KnowledgeBase --- .../vlog4j/core/reasoner/KnowledgeBase.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 6517a97ca..7e716479b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -62,7 +62,7 @@ public class KnowledgeBase implements Iterable { * @author Markus Kroetzsch * */ - class AddStatementVisitor implements StatementVisitor { + private class AddStatementVisitor implements StatementVisitor { @Override public Boolean visit(Fact statement) { addFact(statement); @@ -81,9 +81,9 @@ public Boolean visit(DataSourceDeclaration statement) { } } - final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); + private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); - class ExtractStatementsVisitor implements StatementVisitor { + private class ExtractStatementsVisitor implements StatementVisitor { final ArrayList extracted = new ArrayList<>(); final Class ownType; @@ -127,7 +127,7 @@ public Void visit(DataSourceDeclaration statement) { /** * The primary storage for the contents of the knowledge base. */ - final LinkedHashSet statements = new LinkedHashSet<>(); + private final LinkedHashSet statements = new LinkedHashSet<>(); /** * Known prefixes that can be used to pretty-print the contents of the knowledge @@ -139,13 +139,13 @@ public Void visit(DataSourceDeclaration statement) { /** * Index structure that organises all facts by their predicate. */ - final Map> factsByPredicate = new HashMap<>(); + private final Map> factsByPredicate = new HashMap<>(); /** * Index structure that holds all data source declarations of this knowledge * base. 
*/ - final Set dataSourceDeclarations = new HashSet<>(); + private final Set dataSourceDeclarations = new HashSet<>(); /** * Registers a listener for changes on the knowledge base From fa24e6fb13bdd0893a30da22d01d7014cdc60ccf Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 27 Aug 2019 15:37:14 +0200 Subject: [PATCH 0173/1003] Fixed test to be more robust --- .../semanticweb/vlog4j/core/reasoner/LoggingTest.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 5e7806fed..1455601df 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -117,7 +117,7 @@ public void testSetLogLevelNull() { @Test public void testSetLogFileAppendsToFile() throws IOException { - final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testSetLogFileAppendsToFile.log"; + final String logFilePath = LOGS_DIRECTORY + "-testSetLogFileAppendsToFile.log"; assertFalse(new File(logFilePath).exists()); int countLinesBeforeReset = 0; @@ -142,7 +142,7 @@ public void testSetLogFileAppendsToFile() throws IOException { @Test public void testLogLevelInfo() throws IOException { - final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelInfo.log"; + final String logFilePath = LOGS_DIRECTORY + "-testLogLevelInfo.log"; assertFalse(new File(logFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -161,7 +161,7 @@ public void testLogLevelInfo() throws IOException { @Test public void testLogLevelDebug() throws IOException { - final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDebug.log"; + final String logFilePath = LOGS_DIRECTORY + "-testLogLevelDebug.log"; assertFalse(new File(logFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -181,7 +181,7 @@ public void testLogLevelDebug() throws IOException { @Test public void testLogLevelDefault() throws IOException { - final String defaultLogFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDefault.log"; + final String defaultLogFilePath = LOGS_DIRECTORY + "-testLogLevelDefault.log"; assertFalse(new File(defaultLogFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -193,7 +193,7 @@ public void testLogLevelDefault() throws IOException { } final int countLinesReasonLogLevelDefault = readFile(defaultLogFilePath); - final String warningLogFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDefault.log"; + final String warningLogFilePath = LOGS_DIRECTORY + "-testLogLevelDefault2.log"; assertFalse(new File(warningLogFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { From 66ca9e1200370060e5de6d1c17536802d81690b0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 27 Aug 2019 15:37:54 +0200 Subject: [PATCH 0174/1003] Support parsing into existing KB --- .../semanticweb/vlog4j/parser/RuleParser.java | 18 ++++++++++++++++-- .../vlog4j/parser/javacc/JavaCCParserBase.java | 10 +++++++++- 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index b91cfd6dc..d27d0ffa5 100644 --- 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -36,13 +36,27 @@ /** * Class to statically access VLog parsing functionality. * - * @FIXME Support parsing from multiple files (into one KB). - * * @author Markus Kroetzsch * */ public class RuleParser { + public static void parseInto(KnowledgeBase knowledgeBase, InputStream stream, String encoding) + throws ParsingException { + JavaCCParser javaCcParser = new JavaCCParser(stream, encoding); + javaCcParser.setKnowledgeBase(knowledgeBase); + doParse(javaCcParser); + } + + public static void parseInto(KnowledgeBase knowledgeBase, InputStream stream) throws ParsingException { + parseInto(knowledgeBase, stream, "UTF-8"); + } + + public static void parseInto(KnowledgeBase knowledgeBase, String input) throws ParsingException { + InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + parseInto(knowledgeBase, inputStream, "UTF-8"); + } + public static KnowledgeBase parse(InputStream stream, String encoding) throws ParsingException { return doParse(new JavaCCParser(stream, encoding)); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 52e144235..427dc9143 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -48,7 +48,7 @@ public class JavaCCParserBase { final PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); - final KnowledgeBase knowledgeBase = new KnowledgeBase(); + KnowledgeBase knowledgeBase; /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -81,6 +81,10 @@ public enum FormulaContext { */ BODY } + + public JavaCCParserBase() { + this.knowledgeBase = new KnowledgeBase(); + } Constant createIntegerLiteral(String lexicalForm) { return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_INTEGER + ">"); @@ -211,6 +215,10 @@ void resetVariableSets() { this.headExiVars.clear(); this.headUniVars.clear(); } + + public void setKnowledgeBase(KnowledgeBase knowledgeBase) { + this.knowledgeBase = knowledgeBase; + } public KnowledgeBase getKnowledgeBase() { return knowledgeBase; From 39e95047414df072477aff7dcfaae386ddf0fa2f Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 15:43:08 +0200 Subject: [PATCH 0175/1003] rewrite unit test --- .../implementation/AddDataSourceTest.java | 166 +++++++++++++++--- 1 file changed, 137 insertions(+), 29 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index c3d1ec8b8..37c62d6d4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -1,6 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; /*- * #%L @@ -25,12 +26,12 @@ import java.io.File; import java.io.IOException; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Set; import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.Fact; @@ -39,11 +40,20 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class AddDataSourceTest { - private static final String CSV_FILE_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFacts.csv"; + private static final String CSV_FILE_c1_c2_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFacts.csv"; + + private static final String CSV_FILE_c_d_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"; + + private final Set> csvFile_c1_c2_Content = new HashSet<>(Arrays + .asList(Arrays.asList(Expressions.makeConstant("c1")), Arrays.asList(Expressions.makeConstant("c2")))); + + private final Set> csvFile_c_d_Content = new HashSet<>( + Arrays.asList(Arrays.asList(Expressions.makeConstant("c")), Arrays.asList(Expressions.makeConstant("d"))));; @Test public void testAddDataSourceExistentDataForDifferentPredicates() throws IOException { @@ -52,7 +62,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep final Fact factPredicatePArity2 = Expressions.makeFact("p", Arrays.asList(constantA, constantA)); final Fact factPredicateQArity1 = Expressions.makeFact("q", Arrays.asList(constantA)); final Predicate predicateLArity1 = Expressions.makePredicate("l", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource 
dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(factPredicatePArity2); @@ -63,14 +73,16 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.reason(); - final QueryResultIterator queryResultIteratorL1 = reasoner.answerQuery( - Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeVariable("x")), false); - final Set> queryResultsL1 = QueryResultsUtils.collectQueryResults(queryResultIteratorL1); - - final QueryResultIterator queryResultIteratorP1 = reasoner.answerQuery( - Expressions.makePositiveLiteral(predicateParity1, Expressions.makeVariable("x")), false); - final Set> queryResultsP1 = QueryResultsUtils.collectQueryResults(queryResultIteratorP1); - assertEquals(queryResultsL1, queryResultsP1); + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeVariable("x")), false)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateParity1, Expressions.makeVariable("x")), false)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } } } @@ -79,7 +91,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep public void testAddDataSourceBeforeLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); final KnowledgeBase kb = new KnowledgeBase(); @@ -87,70 +99,166 @@ public void testAddDataSourceBeforeLoading() throws IOException { kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); reasoner.load(); + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } + } } - // TODO rewrite test - @Ignore - @Test(expected = ReasonerStateException.class) + @Test public void testAddDataSourceAfterLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); final KnowledgeBase kb = new KnowledgeBase(); try (final 
VLogReasoner reasoner = new VLogReasoner(kb)) { kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); reasoner.load(); + kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); + + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + } + + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { + assertFalse(queryResult.hasNext()); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } } } - // TODO rewrite test - @Ignore - @Test(expected = ReasonerStateException.class) + @Test public void testAddDataSourceAfterReasoning() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); - reasoner.load(); reasoner.reason(); + kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); + + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + } + + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { + assertFalse(queryResult.hasNext()); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } } } // FIXME decide how to handle datasources with multiple predicates @Ignore - // TODO move to a test class for KnowledgeBase - @Test(expected = IllegalArgumentException.class) + @Test public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource1 = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource2 = new CsvFileDataSource(new File(CSV_FILE_c_d_PATH)); final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); - kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1)); + kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource2)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + try (QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeVariable("x")), true)) { + System.out.println(QueryResultsUtils.collectQueryResults(queryResult)); + } + } } // FIXME decide how to handle datasources with multiple predicates @Ignore - // TODO move to a test class for 
KnowledgeBase - @Test(expected = IllegalArgumentException.class) + @Test public void testAddDataSourceNoFactsForPredicate() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1), Arrays.asList(Expressions.makeConstant("a"))); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(fact); kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + try (QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeVariable("x")), true)) { + QueryResultsUtils.collectQueryResults(queryResult); + } + } + } + + @Test + public void testAddMultipleDataSourcesForPredicate() throws IOException { + final Predicate predicate = Expressions.makePredicate("p", 1); + final DataSource dataSource1 = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource2 = new CsvFileDataSource( + new File(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv")); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1)); + kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource2)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + try (QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeVariable("x")), true)) { + final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); + expectedAnswers.addAll(csvFile_c_d_Content); + + assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + + } + } + } + + @Test + public void testAddDataSourceAndFactsForPredicate() throws IOException { + final Predicate predicate = Expressions.makePredicate("p", 1); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1), + Arrays.asList(Expressions.makeConstant("a"))); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(fact); + kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + try (QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeVariable("x")), true)) { + final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); + expectedAnswers.add(Arrays.asList(Expressions.makeConstant("a"))); + + assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } + } } } From 48027d59ea97a6a48c6e483b4796d1288cd009bd Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 17:11:12 +0200 Subject: [PATCH 0176/1003] make Reasoner#load() package protected and update examples; This is because currently we allow multiple datasources for the same predicate --- .../vlog4j/core/reasoner/Reasoner.java | 10 ---- .../reasoner/implementation/VLogReasoner.java | 4 +- .../vlog4j/core/reasoner/LoggingTest.java | 10 ---- 
 .../core/reasoner/ReasonerTimeoutTest.java    | 10 ----
 .../implementation/AnswerQueryTest.java       |  8 +--
 .../GeneratedAnonymousIndividualsTest.java    |  6 ---
 .../implementation/ReasonerStateTest.java     | 50 ++++++++-----------
 .../examples/CompareWikidataDBpedia.java      | 17 +++----
 .../vlog4j/examples/CountingTriangles.java    |  5 +-
 .../vlog4j/examples/DoidExample.java          |  5 +-
 .../vlog4j/examples/ExamplesUtils.java        | 36 +++++++------
 .../InMemoryGraphAnalysisExample.java         | 13 +++--
 .../examples/SimpleReasoningExample.java      |  2 -
 .../examples/core/AddDataFromCsvFile.java     | 26 ++++++++--
 .../examples/core/AddDataFromRdfFile.java     | 11 ++--
 .../core/AddDataFromSparqlQueryResults.java   |  9 ++--
 .../core/ConfigureReasonerLogging.java        |  3 --
 .../SkolemVsRestrictedChaseTermination.java   | 46 +++++++++--------
 .../examples/graal/AddDataFromDlgpFile.java   | 25 +++++++---
 .../examples/graal/AddDataFromGraal.java      | 28 +++++++++--
 .../examples/graal/DoidExampleGraal.java      |  2 -
 .../owlapi/OwlOntologyToRulesAndFacts.java    |  8 +--
 .../examples/rdf/AddDataFromRdfModel.java     |  1 -
 .../vlog4j/rdf/TestReasonOverRdfFacts.java    |  4 +-
 24 files changed, 166 insertions(+), 173 deletions(-)

diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java
index d8872e987..35b18d3cc 100644
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java
+++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java
@@ -188,16 +188,6 @@ public static Reasoner getInstance() {
 	 */
 	void setLogFile(String filePath);
 
-	/**
-	 * Loads the knowledge base, consisting of the current rules and facts,
-	 * into the reasoner (if it has not been loaded yet). After loading, the
-	 * reasoner is ready for reasoning and querying.
-	 *
-	 * @throws IOException if an I/O error occurs related to the resources in the
-	 *                     knowledge base to be loaded.
-	 */
-	void load() throws IOException;
-
 	/**
 	 * Checks whether the loaded rules and loaded fact EDB predicates are Acyclic,
 	 * Cyclic, or cyclicity cannot be determined.
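Note on the change above: with load() removed from the public Reasoner interface, client code builds a KnowledgeBase, wraps it in a VLogReasoner, and calls reason() (or a query method) directly; loading now happens implicitly. The following is a minimal usage sketch based on the updated tests and examples in this patch. The class name, the fact p(c), the rule q(?X) :- p(?X), and the query are hypothetical; the import paths and parser syntax are assumed to match the examples changed below.

```
import java.io.IOException;

import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.RuleParser;

public class LoadlessReasoningSketch {
	public static void main(final String[] args) throws IOException, ParsingException {
		// Hypothetical fact and rule, written in the syntax used by the examples in this patch.
		final KnowledgeBase kb = RuleParser.parse("p(c) . q(?X) :- p(?X) .");

		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
			// reason() loads the knowledge base implicitly; there is no separate load() call.
			reasoner.reason();

			// Query the materialised knowledge base.
			final PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X)");
			try (final QueryResultIterator answers = reasoner.answerQuery(query, true)) {
				answers.forEachRemaining(answer -> System.out.println(" - " + answer));
			}
		}
	}
}
```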
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index d8372e42a..4611a8d7e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -308,8 +308,8 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { return this.ruleRewriteStrategy; } - @Override - public void load() throws IOException { + //@Override + void load() throws IOException { validateNotClosed(); switch (this.reasonerState) { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 1455601df..33b9b8c24 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -86,7 +86,6 @@ public void testSetLogFileNull() throws IOException { reasoner.setLogFile(null); reasoner.setLogLevel(LogLevel.INFO); - reasoner.load(); reasoner.reason(); } // TODO test that logging is redirected to system output @@ -101,7 +100,6 @@ public void testSetLogFileInexistent() throws IOException { assertFalse(new File(inexistentFilePath).exists()); reasoner.setLogLevel(LogLevel.INFO); - reasoner.load(); reasoner.reason(); } // TODO test that logging is redirected to system output @@ -124,14 +122,12 @@ public void testSetLogFileAppendsToFile() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogLevel(LogLevel.INFO); reasoner.setLogFile(logFilePath); - reasoner.load(); reasoner.reason(); countLinesBeforeReset = readFile(logFilePath); assertTrue(countLinesBeforeReset > 0); reasoner.resetReasoner(); - reasoner.load(); reasoner.reason(); } final int countLinesAfterReset = readFile(logFilePath); @@ -149,8 +145,6 @@ public void testLogLevelInfo() throws IOException { reasoner.setLogLevel(LogLevel.INFO); reasoner.setLogFile(logFilePath); - reasoner.load(); - reasoner.setLogLevel(LogLevel.INFO); reasoner.reason(); reasoner.setLogLevel(LogLevel.INFO); } @@ -168,8 +162,6 @@ public void testLogLevelDebug() throws IOException { reasoner.setLogLevel(LogLevel.DEBUG); reasoner.setLogFile(logFilePath); - reasoner.load(); - reasoner.setLogLevel(LogLevel.DEBUG); reasoner.reason(); reasoner.setLogLevel(LogLevel.DEBUG); reasoner.close(); @@ -187,7 +179,6 @@ public void testLogLevelDefault() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(defaultLogFilePath); - reasoner.load(); reasoner.reason(); reasoner.close(); } @@ -199,7 +190,6 @@ public void testLogLevelDefault() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(warningLogFilePath); reasoner.setLogLevel(LogLevel.WARNING); - reasoner.load(); reasoner.reason(); reasoner.close(); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index f93ca6b10..586c9c4d7 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -126,8 +126,6 @@ public void skolem() throws 
IOException { this.reasoner.setReasoningTimeout(timeout); this.reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - this.reasoner.load(); - assertFalse(this.reasoner.reason()); } @@ -136,8 +134,6 @@ public void restricted() throws IOException { this.reasoner.setReasoningTimeout(timeout); this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - this.reasoner.load(); - assertFalse(this.reasoner.reason()); } @@ -145,8 +141,6 @@ public void restricted() throws IOException { public void skolemAfterLoad() throws IOException { this.reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - this.reasoner.load(); - this.reasoner.setReasoningTimeout(timeout); assertFalse(this.reasoner.reason()); @@ -156,8 +150,6 @@ public void skolemAfterLoad() throws IOException { public void restrictedAfterLoad() throws IOException { this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - this.reasoner.load(); - this.reasoner.setReasoningTimeout(timeout); assertFalse(this.reasoner.reason()); @@ -168,7 +160,6 @@ public void resetReasoningTimeoutToNull() throws IOException { this.reasoner.setReasoningTimeout(timeout); this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - this.reasoner.load(); assertFalse(this.reasoner.reason()); this.reasoner.resetReasoner(); @@ -179,7 +170,6 @@ public void resetReasoningTimeoutToNull() throws IOException { this.reasoner.setReasoningTimeout(null); - this.reasoner.load(); assertTrue(this.reasoner.reason()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index fa8c39a65..d9a0e2913 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -47,7 +47,6 @@ import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; public class AnswerQueryTest { @@ -66,9 +65,10 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOExcep @SuppressWarnings("unchecked") final Set> factCCD = Sets.newSet(Arrays.asList(constantC, constantC, constantD)); - try (final Reasoner reasoner = Reasoner.getInstance()) { - final KnowledgeBase kb = reasoner.getKnowledgeBase(); - kb.addStatement(fact); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(fact); + + try (final VLogReasoner reasoner =new VLogReasoner(kb)) { reasoner.load(); final PositiveLiteral queryAtomXYZ = Expressions.makePositiveLiteral(predicate, x, y, z); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index fc21e98f8..8cd91a117 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -80,7 +80,6 @@ public void testBlanksSkolemChaseNoRuleRewrite() throws IOException { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); assertEquals(RuleRewriteStrategy.NONE, 
reasoner.getRuleRewriteStrategy()); - reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true); @@ -99,7 +98,6 @@ public void testBlanksSkolemChaseSplitHeadPieces() throws IOException { // P(?x,?z)} } reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true); @@ -115,7 +113,6 @@ public void testBlanksRestrictedChaseNoRuleRewrite() throws IOException { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); - reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true); @@ -135,11 +132,9 @@ public void testBlanksRestrictedChaseSplitHeadPieces() throws IOException { // P(?x,?z)} } reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - reasoner.load(); reasoner.reason(); // FIXME check this test - // <<<<<<< HEAD // reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, // true); // // expected fact: P(c, _:b) @@ -151,7 +146,6 @@ public void testBlanksRestrictedChaseSplitHeadPieces() throws IOException { // assertEquals(queryResult.get(0), "c"); // } // reasoner.exportQueryAnswersToCsv(this.queryAtom, excludeBlanksFilePath, - // ======= checkTwoDistinctBlanksGenerated(reasoner); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index a8eb3c923..fdebd1fdc 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -83,36 +83,35 @@ public void testSetReasoningTimeout() { @Test public void testAddFactsAndQuery() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.getKnowledgeBase().addStatement(factPc); + final KnowledgeBase kb = new KnowledgeBase(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.addStatement(factPc); reasoner.load(); - + final PositiveLiteral query = Expressions.makePositiveLiteral(p, x); - final Set> expectedAnswersC = new HashSet<>( - Arrays.asList(Collections.singletonList(c))); - - try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + final Set> expectedAnswersC = new HashSet<>(Arrays.asList(Collections.singletonList(c))); + + try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); final Set> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); - + assertEquals(expectedAnswersC, queryAnswersC); } - reasoner.getKnowledgeBase().addStatement(factPd); - - try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + + try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); assertEquals(expectedAnswersC, QueryResultsUtils.collectQueryResults(queryResult)); } reasoner.load(); - - try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + + try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { 
assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); - + final Set> queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); - + final Set> expectedAnswersCD = new HashSet<>( Arrays.asList(Collections.singletonList(c), Collections.singletonList(d))); assertEquals(expectedAnswersCD, queryAnswersD); @@ -120,16 +119,6 @@ public void testAddFactsAndQuery() throws IOException { } } - // FIXME update test - @Ignore - @Test(expected = ReasonerStateException.class) - public void testAddRules1() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.getKnowledgeBase().addStatement(ruleQxPx); - reasoner.load(); - } - } - @Test public void testAddRules2() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -314,18 +303,21 @@ public void testFailExportQueryAnswerToCsvBeforeLoad() throws IOException { @Test public void testSuccessiveCloseAfterLoad() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + final KnowledgeBase kb = new KnowledgeBase(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.close(); reasoner.close(); } } - @Test - public void testSuccessiveCloseBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance()) { + @Test(expected=ReasonerStateException.class) + public void testSuccessiveCloseBeforeLoad() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.close(); reasoner.close(); + reasoner.load(); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 93ce773fe..7e1031f42 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -59,17 +59,17 @@ public class CompareWikidataDBpedia { static String sparqlGetWikiIriDBpedia = "?result ?enwikipageHttp . " + "BIND( IRI(CONCAT(\"https\",SUBSTR(str(?enwikipageHttp), 5))) AS ?enwikipage)"; - public static void main(String[] args) throws ParsingException, IOException { + public static void main(final String[] args) throws ParsingException, IOException { ExamplesUtils.configureLogging(); // Wikidata pattern: P69 is "educated at"; Q154804 is "University of Leipzig" - String wikidataSparql = "?result wdt:P69 wd:Q154804 . " + sparqlGetWikiIriWikidata; + final String wikidataSparql = "?result wdt:P69 wd:Q154804 . " + sparqlGetWikiIriWikidata; // DBpedia pattern: - String dbpediaSparql = "?result . " + final String dbpediaSparql = "?result . " + sparqlGetWikiIriDBpedia; // Configure the SPARQL data sources and some rules to analyse results: - String rules = "" // + final String rules = "" // + "@prefix wdqs: ." // + "@prefix dbp: ." // + "@source dbpResult(2) : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." 
// @@ -86,19 +86,18 @@ public static void main(String[] args) throws ParsingException, IOException { final KnowledgeBase kb = RuleParser.parse(rules); try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); reasoner.reason(); - int resultCount = ExamplesUtils.getQueryAnswerCount("result(?X)", reasoner); - int wdCount = ExamplesUtils.getQueryAnswerCount("inWd(?X)", reasoner); - int dbpCount = ExamplesUtils.getQueryAnswerCount("inDbp(?X)", reasoner); + final int resultCount = ExamplesUtils.getQueryAnswerCount("result(?X)", reasoner); + final int wdCount = ExamplesUtils.getQueryAnswerCount("inWd(?X)", reasoner); + final int dbpCount = ExamplesUtils.getQueryAnswerCount("inDbp(?X)", reasoner); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); System.out.println("We focus on results found in DBpedia only (usually the smaller set)."); ExamplesUtils.printOutQueryAnswers("dbpOnly(?X)", reasoner); - + System.out.println("Note: some of these results might still be in Wikidata, due to:\n" + "* recent Wikipedia article renamings that are not updated in DBpedia\n" + "* failure to match Wikipedia URLs due to small differences in character encoding\n"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 3a4fc8cc5..4a7f2a690 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -57,10 +57,9 @@ public static void main(final String[] args) throws IOException { try (VLogReasoner reasoner = new VLogReasoner(kb)) { - /* Initialise reasoner and compute inferences */ - reasoner.load(); - System.out.println("Note: Materialisation includes SPARQL query answering."); + + /* Initialise reasoner and compute inferences */ reasoner.reason(); /* Execute queries */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index bfbd3a3d7..19c70a27a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -65,10 +65,9 @@ public static void main(final String[] args) throws IOException { reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); reasoner.setLogLevel(LogLevel.DEBUG); - /* Initialise reasoner and compute inferences */ - reasoner.load(); - System.out.println("Note: Materialisation includes SPARQL query answering."); + + /* Initialise reasoner and compute inferences */ reasoner.reason(); /* Execute some queries */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 929bb1832..4d8c57346 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -33,6 +33,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import 
org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.parser.ParsingException; @@ -66,10 +67,10 @@ private ExamplesUtils() { */ public static void configureLogging() { // Create the appender that will write log messages to the console. - ConsoleAppender consoleAppender = new ConsoleAppender(); + final ConsoleAppender consoleAppender = new ConsoleAppender(); // Define the pattern of log messages. // Insert the string "%c{1}:%L" to also show class name and line. - String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; + final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; consoleAppender.setLayout(new PatternLayout(pattern)); // Change to Level.ERROR for fewer messages: consoleAppender.setThreshold(Level.DEBUG); @@ -89,8 +90,10 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R System.out.println("Answers to query " + queryAtom + " :"); try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { answers.forEachRemaining(answer -> System.out.println(" - " + answer)); - System.out.println(); + + System.out.println("Query answers are: " + answers.getMaterialisationState()); } + System.out.println(); } /** @@ -102,9 +105,9 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R */ public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) { try { - PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); + final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); printOutQueryAnswers(query, reasoner); - } catch (ParsingException e) { + } catch (final ParsingException e) { throw new RuntimeException(e.getMessage(), e); } } @@ -119,11 +122,11 @@ public static void printOutQueryAnswers(final String queryString, final Reasoner */ public static int getQueryAnswerCount(final String queryString, final Reasoner reasoner) { try { - PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); + final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); try (final QueryResultIterator answers = reasoner.answerQuery(query, true)) { return iteratorSize(answers); } - } catch (ParsingException e) { + } catch (final ParsingException e) { throw new RuntimeException(e.getMessage(), e); } } @@ -136,10 +139,11 @@ public static int getQueryAnswerCount(final String queryString, final Reasoner r * @param Iterator to iterate over * @return number of elements in iterator */ - public static int iteratorSize(Iterator iterator) { + public static int iteratorSize(final Iterator iterator) { int size = 0; - for (; iterator.hasNext(); ++size) + for (; iterator.hasNext(); ++size) { iterator.next(); + } return size; } @@ -149,10 +153,11 @@ public static int iteratorSize(Iterator iterator) { * @param predicateName for the new predicate * @param arity number of variables */ - private static PositiveLiteral makeQueryAtom(String predicateName, int arity) { + private static PositiveLiteral makeQueryAtom(final String predicateName, final int arity) { final List vars = new ArrayList<>(); - for (int i = 0; i < arity; i++) + for (int i = 0; i < arity; i++) { vars.add(Expressions.makeVariable("x" + i)); + } return Expressions.makePositiveLiteral(predicateName, vars); } @@ -163,11 +168,14 @@ private static PositiveLiteral makeQueryAtom(String predicateName, int arity) { * @param atomName atom's name * @param arity atom's arity */ - public static void 
exportQueryAnswersToCSV(Reasoner reasoner, String atomName, int arity) + public static void exportQueryAnswersToCSV(final Reasoner reasoner, final String atomName, final int arity) throws ReasonerStateException, IOException { final PositiveLiteral atom = makeQueryAtom(atomName, arity); - String path = ExamplesUtils.OUTPUT_FOLDER + atomName + ".csv"; - reasoner.exportQueryAnswersToCsv(atom, path, true); + final String path = ExamplesUtils.OUTPUT_FOLDER + atomName + ".csv"; + + final MaterialisationState correctness = reasoner.exportQueryAnswersToCsv(atom, path, true); + + System.out.println("Query answers are: " + correctness); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index 05d0b65ef..9e68b8406 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -45,15 +45,15 @@ */ public class InMemoryGraphAnalysisExample { - public static void main(String[] args) throws ParsingException, IOException { + public static void main(final String[] args) throws ParsingException, IOException { ExamplesUtils.configureLogging(); /* 1. Create a simple random graph */ System.out.println("Generating random graph ..."); - int vertexCount = 10000; - double density = 0.03; + final int vertexCount = 10000; + final double density = 0.03; // initialise data source for storing edges (estimate how many we'll need) - InMemoryDataSource edges = new InMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); + final InMemoryDataSource edges = new InMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); int edgeCount = 0; for (int i = 1; i <= vertexCount; i++) { for (int j = 1; j <= vertexCount; j++) { @@ -64,7 +64,7 @@ public static void main(String[] args) throws ParsingException, IOException { } } // also make a unary data source to mark vertices: - InMemoryDataSource vertices = new InMemoryDataSource(1, vertexCount); + final InMemoryDataSource vertices = new InMemoryDataSource(1, vertexCount); for (int i = 1; i <= vertexCount; i++) { vertices.addTuple("v" + i); } @@ -85,13 +85,12 @@ public static void main(String[] args) throws ParsingException, IOException { /* 3. 
Use reasoner to compute some query results */ try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); reasoner.reason(); System.out.println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + ExamplesUtils.getQueryAnswerCount("unreachable(?X)", reasoner)); System.out.println("Number of bi-directional triangles: " - + ExamplesUtils.getQueryAnswerCount("triangle(?X,?Y,?Z)", reasoner) / 6); + + (ExamplesUtils.getQueryAnswerCount("triangle(?X,?Y,?Z)", reasoner) / 6)); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 81a3362f9..5b5875a63 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -77,8 +77,6 @@ public static void main(final String[] args) throws IOException { } try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); - reasoner.reason(); /* Execute some queries */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 7af9eb473..b68db527b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -55,9 +55,11 @@ public static void main(final String[] args) throws IOException, ParsingExceptio ExamplesUtils.configureLogging(); + final String initialFactsHasPart = ""// a file input: + + "@source hasPart(2) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ."; + final String rules = "" // first declare file inputs: + "@source bicycle(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz\") ." - + "@source hasPart(2) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ." + "@source wheel(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz\") ." // every bicycle has some part that is a wheel: + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." @@ -67,20 +69,36 @@ public static void main(final String[] args) throws IOException, ParsingExceptio + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." // + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; - final KnowledgeBase kb = RuleParser.parse(rules); - /* * Loading, reasoning, and querying while using try-with-resources to close the * reasoner automatically. */ + final KnowledgeBase kb = new KnowledgeBase(); try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); + /* + * 1. Loading the initial facts with hasPart predicate into reasoner. + */ + RuleParser.parseInto(kb, initialFactsHasPart); + reasoner.reason(); + + /* + * Query initial facts with hasPart predicate. + */ System.out.println("Before materialisation:"); ExamplesUtils.printOutQueryAnswers("hasPart(?X, ?Y)", reasoner); + /* + * 2. Loading further facts and rules into the reasoner, and materialising the + * loaded facts with the rules. + */ + RuleParser.parseInto(kb, rules); /* The reasoner will use the Restricted Chase by default. */ reasoner.reason(); + + /* + * Querying facts with hasPart predicate after materialisation. 
+ */ System.out.println("After materialisation:"); final PositiveLiteral hasPartXY = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); ExamplesUtils.printOutQueryAnswers(hasPartXY, reasoner); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index dd62026ce..a00a008d1 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -79,16 +79,11 @@ public static void main(final String[] args) throws IOException, ParsingExceptio final KnowledgeBase kb = RuleParser.parse(rules); /* - * 2. Loading, reasoning, querying and exporting, while using try-with-resources - * to close the reasoner automatically. + * 2. reasoning, querying and exporting, while using try-with-resources to close + * the reasoner automatically. */ - try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); - - System.out.println("Before materialisation:"); - - ExamplesUtils.printOutQueryAnswers("triple(?X, , ?Y)", reasoner); + try (final Reasoner reasoner = new VLogReasoner(kb)) { /* The reasoner will use the Restricted Chase by default. */ reasoner.reason(); System.out.println("After materialisation:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 40084ccdb..6dd775bb9 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -129,7 +129,7 @@ public static void main(final String[] args) throws IOException { */ kb.addStatement(new DataSourceDeclarationImpl(queryPredicate, sparqlQueryResultDataSource)); - reasoner.load(); + reasoner.reason(); /* * We construct a query PositiveLiteral for the predicated associated to the @@ -164,13 +164,10 @@ public static void main(final String[] args) throws IOException { final Rule rule = Expressions.makeRule(ruleHeadConjunction, Expressions.makeConjunction(query)); /* - * We reset the reasoner in order to add the created rule, and reason on the - * data added from the Wikidata SPARQL query result. + * We add the created rule, and reason on the data added from the Wikidata + * SPARQL query result. */ - reasoner.resetReasoner(); - kb.addStatement(rule); - reasoner.load(); reasoner.reason(); /* We query the reasoner for facts of the haveChildrenTogether predicate. */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java index 3ca012638..828fa4339 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java @@ -103,7 +103,6 @@ public static void main(final String[] args) throws IOException { * Default reasoner log level is WARNING. 
*/ reasoner.setLogFile(reasonerWarningLogFilePath); - reasoner.load(); reasoner.reason(); /* @@ -123,7 +122,6 @@ public static void main(final String[] args) throws IOException { */ reasoner.setLogFile(reasonerInfoLogFilePath); - reasoner.load(); reasoner.reason(); reasoner.resetReasoner(); @@ -138,7 +136,6 @@ public static void main(final String[] args) throws IOException { * redirected to System output by default. */ reasoner.setLogFile(reasonerDebugLogFilePath); - reasoner.load(); reasoner.reason(); } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index aef1c2df4..80dbaba9e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -46,13 +46,13 @@ public static void main(final String[] args) throws IOException, ParsingExceptio ExamplesUtils.configureLogging(); - /* 1. Load data and prepare rules. */ - - final String rules = "" // define some facts: + final String facts = ""// define some facts: + "bicycle(bicycle1) ." // + "hasPart(bicycle1, wheel1) ." // + "wheel(wheel1) ." // - + "bicycle(bicycle2) ." // + + "bicycle(bicycle2) ."; + + final String rules = "" // every bicycle has some part that is a wheel: + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." // // every wheel is part of some bicycle: @@ -61,15 +61,20 @@ public static void main(final String[] args) throws IOException, ParsingExceptio + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." // + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; - final KnowledgeBase kb = RuleParser.parse(rules); + /* + * 1. Load facts into a knowledge base + */ + final KnowledgeBase kb = RuleParser.parse(facts); /* - * 2. Loading, reasoning, and querying. Use try-with resources, or remember to - * call close() to free the reasoner resources. + * 2. Load the knowledge base into the reasoner */ try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); + reasoner.reason(); + /* + * 3. Query the reasoner before applying rules for fact materialisation + */ final PositiveLiteral queryHasPart = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); /* See that there is no fact HasPartIDB before reasoning. */ @@ -77,8 +82,13 @@ public static void main(final String[] args) throws IOException, ParsingExceptio ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); /* - * As the Skolem Chase is known not to terminate for this set of rules and - * facts, it is interrupted after one second. + * 4. Load rules into the knowledge base + */ + RuleParser.parseInto(kb, rules); + /* + * 5. Materialise with the Skolem Chase. As the Skolem Chase is known not to + * terminate for this set of rules and facts, it is interrupted after one + * second. */ reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); reasoner.setReasoningTimeout(1); @@ -97,23 +107,15 @@ public static void main(final String[] args) throws IOException, ParsingExceptio + ExamplesUtils.iteratorSize(answers) + " results for hasPart(?X, ?Y)."); /* - * We reset the reasoner and apply the Restricted Chase on the same set of rules - * and facts + * 6. 
We reset the reasoner to discard all inferences, and apply the Restricted + * Chase on the same set of rules and facts */ System.out.println(); reasoner.resetReasoner(); - reasoner.load(); - - /* - * See that there is no fact HasPartIDB before reasoning. All inferred facts - * have been discarded when the reasoner was reset. - */ - System.out.println("We can verify that there are no inferences for hasPart(?X, ?Y) after reset."); - ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); /* - * As the Restricted Chase is known to terminate for this set of rules and - * facts, we will not interrupt it. + * 7. Materialise with the Restricted Chase. As the Restricted Chase is known to + * terminate for this set of rules and facts, we will not interrupt it. */ reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); reasoner.setReasoningTimeout(null); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java index f429ff4a0..07bfafa49 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java @@ -103,19 +103,30 @@ public static void main(final String[] args) throws IOException { try (Reasoner reasoner = Reasoner.getInstance()) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); - kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); - for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { - kb.addStatement(graalConjunctiveQueryToRule.getRule()); - } - kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); - reasoner.load(); + /* + * Add facts to the reasoner knowledge base + */ + kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); + /* + * Load the knowledge base into the reasoner + */ + reasoner.reason(); System.out.println("Before materialisation:"); for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { ExamplesUtils.printOutQueryAnswers(graalConjunctiveQueryToRule.getQuery(), reasoner); } - /* The reasoner will use the Restricted Chase by default. */ + /* + * Add rules to the reasoner knowledge base + */ + kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); + for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { + kb.addStatement(graalConjunctiveQueryToRule.getRule()); + } + /* + * Materialise facts using rules + */ reasoner.reason(); System.out.println("After materialisation:"); for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index 7d595dfad..497acef27 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -122,17 +122,35 @@ public static void main(final String[] args) throws IOException { * the reasoner automatically. 
*/ final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); - kb.addStatements(convertedGraalConjunctiveQuery.getRule()); - kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); try (Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); + + /* + * Add facts to the reasoner knowledge base + */ + kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); + /* + * Load the knowledge base into the reasoner + */ + reasoner.reason(); + + /* + * Query the loaded facts + */ System.out.println("Before materialisation:"); ExamplesUtils.printOutQueryAnswers(convertedGraalConjunctiveQuery.getQuery(), reasoner); - /* The reasoner will use the Restricted Chase by default. */ + /* + * Add rules to the reasoner knowledge base + */ + kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); + kb.addStatements(convertedGraalConjunctiveQuery.getRule()); + + /* + * Materialise facts using rules + */ reasoner.reason(); + System.out.println("After materialisation:"); ExamplesUtils.printOutQueryAnswers(convertedGraalConjunctiveQuery.getQuery(), reasoner); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 5d27f95e1..4a7263218 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -129,8 +129,6 @@ public static void main(final String[] args) throws IOException { System.out.println("Rules configured:\n--"); kb.getRules().forEach(System.out::println); System.out.println("--"); - reasoner.load(); - System.out.println("Loading completed."); System.out.println("Starting reasoning (including SPARQL query answering) ..."); reasoner.reason(); System.out.println("... 
reasoning completed."); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 3f14f1556..c85abf354 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -87,10 +87,10 @@ public static void main(final String[] args) throws OWLOntologyCreationException kb.addStatements(owlToRulesConverter.getFacts()); try (VLogReasoner reasoner = new VLogReasoner(kb)) { - /* Load rules and facts obtained from the ontology */ - reasoner.load(); - - /* Reason over loaded ontology with the default algorithm Restricted Chase */ + /* + * Load rules and facts obtained from the ontology, and reason over loaded + * ontology with the default algorithm Restricted Chase + */ System.out.println("Reasoning default algorithm: " + reasoner.getAlgorithm()); reasoner.reason(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index b3839f86a..7f8dc3024 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -132,7 +132,6 @@ public static void main(final String[] args) kb.addStatements(tripleFactsISWC2017); try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); reasoner.reason(); /* We query for persons whose organization name is "TU Dresden" . */ diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index e1c32fc63..0bd2e7d25 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -72,7 +72,7 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl kb.addStatements(facts); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); + reasoner.reason(); final PositiveLiteral universalQuery = makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(subject, predicate, object)); @@ -91,7 +91,7 @@ public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandle kb.addStatements(facts); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); + reasoner.reason(); final Constant inventionPredicate = makeConstant("https://example.org/invention"); final Constant carlBenzSubject = makeConstant("https://example.org/Carl-Benz"); From 14b6e138667c1c7f4407ed11717a679a911a78df Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 17:28:33 +0200 Subject: [PATCH 0177/1003] fix log messages --- .../reasoner/implementation/VLogReasoner.java | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 4611a8d7e..b2262d9a4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -286,7 +286,7 @@ public Algorithm getAlgorithm() { public void setReasoningTimeout(Integer seconds) { validateNotClosed(); if (seconds != null) { - Validate.isTrue(seconds > 0, "Only strictly positive timeout period alowed!", seconds); + Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); } this.timeoutAfterSeconds = seconds; } @@ -433,11 +433,11 @@ void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); if (LOGGER.isDebugEnabled()) { for (final String[] tuple : inMemoryDataSource.getData()) { - LOGGER.debug("Loaded direct fact " + vLogPredicateName + Arrays.toString(tuple)); + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); } } } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); + throw new RuntimeException("Invalid data sources configuration!", e); } } @@ -459,12 +459,12 @@ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource final int dataSourcePredicateArity = this.vLog .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); if (dataSourcePredicateArity == -1) { - LOGGER.warn("Data source {} for predicate {} is empty: ", dataSource, predicate); + LOGGER.warn("Data source {} for predicate {} is empty! ", dataSource, predicate); } else if (predicate.getArity() != dataSourcePredicateArity) { throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RuntimeException("Inconsistent reasoner state!", e); } } @@ -483,11 +483,11 @@ void loadFacts() { this.vLog.addData(vLogPredicateName, vLogPredicateTuples); if (LOGGER.isDebugEnabled()) { for (final String[] tuple : vLogPredicateTuples) { - LOGGER.debug("Loaded direct fact " + vLogPredicateName + Arrays.toString(tuple)); + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); } } } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); + throw new RuntimeException("Invalid data sources configuration!", e); } } } @@ -500,11 +500,11 @@ void loadRules() { this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); if (LOGGER.isDebugEnabled()) { for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { - LOGGER.debug("Loaded rule " + rule.toString()); + LOGGER.debug("Loaded rule {}.", rule.toString()); } } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RuntimeException("Inconsistent reasoner state!", e); } } @@ -584,7 +584,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty."); + + " that does not occur in the knowledge base. 
Answer must be empty!"); return new EmptyQueryResultIterator(MaterialisationState.COMPLETE); } @@ -608,7 +608,7 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, try { this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { throw new IllegalArgumentException(MessageFormat.format( "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); @@ -685,7 +685,7 @@ public boolean isMFC() { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, - "checking rules acyclicity is not allowed before loading!"); + "Checking rules acyclicity is not allowed before loading!"); } CyclicCheckResult checkCyclic; @@ -701,7 +701,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, - "checking rules acyclicity is not allowed before loading!"); + "Checking rules acyclicity is not allowed before loading!"); } CyclicCheckResult checkCyclic; @@ -785,8 +785,8 @@ private void updateReasonerToKnowledgeBaseChanged() { */ void validateNotClosed() throws ReasonerStateException { if (this.reasonerState == ReasonerState.CLOSED) { - LOGGER.error("Invalid operation requested on a closed reasoner object."); - throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner."); + LOGGER.error("Invalid operation requested on a closed reasoner object!"); + throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); } } From 739271cfa471141a3d11ed68fbb951b7804ba2ef Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 17:39:51 +0200 Subject: [PATCH 0178/1003] renamed MaterialisationState to Correctness --- ...rialisationState.java => Correctness.java} | 19 +++++------ .../core/reasoner/QueryResultIterator.java | 2 +- .../vlog4j/core/reasoner/Reasoner.java | 2 +- .../EmptyQueryResultIterator.java | 8 ++--- .../VLogQueryResultIterator.java | 8 ++--- .../reasoner/implementation/VLogReasoner.java | 32 +++++++++---------- .../implementation/AddDataSourceTest.java | 22 ++++++------- .../implementation/ReasonerStateTest.java | 8 ++--- .../vlog4j/examples/ExamplesUtils.java | 4 +-- 9 files changed, 53 insertions(+), 52 deletions(-) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/{MaterialisationState.java => Correctness.java} (69%) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java similarity index 69% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java index 3e405eae8..a7dc7917d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java @@ -27,29 +27,30 @@ * @author Irina Dragoste * */ -public enum MaterialisationState { +public enum Correctness { /** * Reasoning has not completed. 
Query answering yields sound, but possibly * incomplete answers. */ - INCOMPLETE("incomplete"), + SOUND_BUT_INCOMPLETE("sound but incomplete"), /** - * Query answering may give incorrect answers. Re-materialisation - * ({@link Reasoner#reason()}) is required, in order to obtain correct results. + * Query answering may give incorrect (unsound or incomplete) answers. + * Re-materialisation ({@link Reasoner#reason()}) is required, in order to + * obtain correct results. */ - WRONG("wrong"), + INCORRECT("incorrect"), /** - * Reasoning over current knowledge base is complete, and query answering yields - * sound and complete results. + * Reasoning over current knowledge base has completed, and query answering + * yields correct (sound and complete) results. */ - COMPLETE("complete"); + SOUND_AND_COMPLETE("sound and complete"); private final String name; - private MaterialisationState(String name) { + private Correctness(String name) { this.name = name; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java index c2a7ee746..e0ad5217c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java @@ -26,7 +26,7 @@ public interface QueryResultIterator extends Iterator, AutoCloseable { - public MaterialisationState getMaterialisationState(); + public Correctness getMaterialisationState(); public void close(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 35b18d3cc..f55b5e2a1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -375,7 +375,7 @@ public static Reasoner getInstance() { * ({@code csvFilePath)}. 
*/ // TODO update javadoc with return type - MaterialisationState exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeBlanks) + Correctness exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeBlanks) throws IOException; /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java index 268c27371..d83bac081 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -21,7 +21,7 @@ */ import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; /** @@ -32,9 +32,9 @@ */ public class EmptyQueryResultIterator implements QueryResultIterator { - final MaterialisationState materialisationState; + final Correctness materialisationState; - public EmptyQueryResultIterator(MaterialisationState materialisationState) { + public EmptyQueryResultIterator(Correctness materialisationState) { this.materialisationState = materialisationState; } @@ -53,7 +53,7 @@ public QueryResult next() { return null; } - public MaterialisationState getMaterialisationState() { + public Correctness getMaterialisationState() { return this.materialisationState; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java index fe91caae8..648250fd9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java @@ -21,7 +21,7 @@ */ import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import karmaresearch.vlog.Term; @@ -38,11 +38,11 @@ public class VLogQueryResultIterator implements QueryResultIterator { private final TermQueryResultIterator vLogTermQueryResultIterator; - private final MaterialisationState materialisationState; + private final Correctness materialisationState; // TODO add reasoningState to constructor public VLogQueryResultIterator(final TermQueryResultIterator termQueryResultIterator, - final MaterialisationState materialisationState) { + final Correctness materialisationState) { this.vLogTermQueryResultIterator = termQueryResultIterator; this.materialisationState = materialisationState; } @@ -63,7 +63,7 @@ public void close() { this.vLogTermQueryResultIterator.close(); } - public MaterialisationState getMaterialisationState() { + public Correctness getMaterialisationState() { return this.materialisationState; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b2262d9a4..21b8a5706 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -31,10 +31,10 @@ import org.semanticweb.vlog4j.core.model.implementation.VariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -244,7 +244,7 @@ void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { final Set rules = new HashSet<>(); private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; - private MaterialisationState materialisationState = MaterialisationState.INCOMPLETE; + private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; private LogLevel internalLogLevel = LogLevel.WARNING; private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; @@ -357,7 +357,7 @@ void loadKnowledgeBase() throws IOException { this.reasonerState = ReasonerState.KB_LOADED; // if there are no rules, then materialisation state is complete - this.materialisationState = rules.isEmpty()? MaterialisationState.COMPLETE: MaterialisationState.INCOMPLETE; + this.correctness = rules.isEmpty()? Correctness.SOUND_AND_COMPLETE: Correctness.SOUND_BUT_INCOMPLETE; LOGGER.info("Finished loading knowledge base."); } @@ -558,10 +558,10 @@ private void runChase() { } if (this.reasoningCompleted) { - this.materialisationState = MaterialisationState.COMPLETE; + this.correctness = Correctness.SOUND_AND_COMPLETE; LOGGER.info("Completed materialisation of inferences."); } else { - this.materialisationState = MaterialisationState.INCOMPLETE; + this.correctness = Correctness.SOUND_BUT_INCOMPLETE; LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); } } @@ -585,15 +585,15 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. 
Answer must be empty!"); - return new EmptyQueryResultIterator(MaterialisationState.COMPLETE); + return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); } - logWarningOnMaterialisationState(); - return new VLogQueryResultIterator(stringQueryResultIterator, this.materialisationState); + logWarningOnCorrectness(); + return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); } @Override - public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, + public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { @@ -614,14 +614,14 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); } - logWarningOnMaterialisationState(); - return this.materialisationState; + logWarningOnCorrectness(); + return this.correctness; } - private void logWarningOnMaterialisationState() { - if (this.materialisationState != MaterialisationState.COMPLETE) { + private void logWarningOnCorrectness() { + if (this.correctness != Correctness.SOUND_AND_COMPLETE) { LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", - this.materialisationState); + this.correctness); } } @@ -750,7 +750,7 @@ private void updateReasonerToKnowledgeBaseChanged() { || this.reasonerState.equals(ReasonerState.MATERIALISED)) { this.reasonerState = ReasonerState.KB_CHANGED; - this.materialisationState = MaterialisationState.WRONG; + this.correctness = Correctness.INCORRECT; } } @@ -774,7 +774,7 @@ private void updateReasonerToKnowledgeBaseChanged() { // private void updateMaterialisationStateOnStatementsAdded(boolean materialisationInvalidated) { // if (this.reasonerState.equals(ReasonerState.KB_CHANGED) && materialisationInvalidated) { -// this.materialisationState = MaterialisationState.WRONG; +// this.materialisationState = Correctness.WRONG; // } // } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 37c62d6d4..b387d4b80 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -39,8 +39,8 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class AddDataSourceTest { @@ -76,12 +76,12 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeVariable("x")), false)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + 
assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateParity1, Expressions.makeVariable("x")), false)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } @@ -102,12 +102,12 @@ public void testAddDataSourceBeforeLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } @@ -130,13 +130,13 @@ public void testAddDataSourceAfterLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } } @@ -158,13 +158,13 @@ public void testAddDataSourceAfterReasoning() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } } @@ -231,7 +231,7 @@ public void testAddMultipleDataSourcesForPredicate() throws IOException { expectedAnswers.addAll(csvFile_c_d_Content); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } @@ 
-256,7 +256,7 @@ public void testAddDataSourceAndFactsForPredicate() throws IOException { expectedAnswers.add(Arrays.asList(Expressions.makeConstant("a"))); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index fdebd1fdc..f16fcd530 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -46,8 +46,8 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; @@ -92,7 +92,7 @@ public void testAddFactsAndQuery() throws IOException { final Set> expectedAnswersC = new HashSet<>(Arrays.asList(Collections.singletonList(c))); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); final Set> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); assertEquals(expectedAnswersC, queryAnswersC); @@ -101,14 +101,14 @@ public void testAddFactsAndQuery() throws IOException { reasoner.getKnowledgeBase().addStatement(factPd); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); assertEquals(expectedAnswersC, QueryResultsUtils.collectQueryResults(queryResult)); } reasoner.load(); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); final Set> queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 4d8c57346..03cc92e2e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -33,7 +33,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import 
org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.parser.ParsingException; @@ -173,7 +173,7 @@ public static void exportQueryAnswersToCSV(final Reasoner reasoner, final String final PositiveLiteral atom = makeQueryAtom(atomName, arity); final String path = ExamplesUtils.OUTPUT_FOLDER + atomName + ".csv"; - final MaterialisationState correctness = reasoner.exportQueryAnswersToCsv(atom, path, true); + final Correctness correctness = reasoner.exportQueryAnswersToCsv(atom, path, true); System.out.println("Query answers are: " + correctness); } From 139c4dfc47505cb5f0912273bd507d5db69c10d6 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 18:35:21 +0200 Subject: [PATCH 0179/1003] correct issue with KnowledgeBase events --- .../vlog4j/core/model/api/Statement.java | 2 +- .../vlog4j/core/reasoner/KnowledgeBase.java | 50 ++++++++++--------- .../core/reasoner/KnowledgeBaseListener.java | 47 +++++++++-------- .../reasoner/implementation/VLogReasoner.java | 2 +- 4 files changed, 52 insertions(+), 49 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java index c120c1ef9..464397b18 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java @@ -21,7 +21,7 @@ */ /** - * A statement is any element that a knowledge base can sonsist of, such as a + * A statement is any element that a knowledge base can consist of, such as a * {@link Rule}, {@link Fact}, or {@link DataSourceDeclaration}. * * @author Markus Kroetzsch diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 7e716479b..376806aaa 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -16,7 +16,6 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; @@ -42,7 +41,7 @@ */ /** - * A knowledge base with rules, facts, and declartions for loading data from + * A knowledge base with rules, facts, and declarations for loading data from * further sources. This is a "syntactic" object in that it represents some * information that is not relevant for the semantics of reasoning, but that is * needed to ensure faithful re-serialisation of knowledge bases loaded from @@ -129,12 +128,12 @@ public Void visit(DataSourceDeclaration statement) { */ private final LinkedHashSet statements = new LinkedHashSet<>(); - /** - * Known prefixes that can be used to pretty-print the contents of the knowledge - * base. We try to preserve user-provided prefixes found in files when loading - * data. - */ - PrefixDeclarations prefixDeclarations; +// /** +// * Known prefixes that can be used to pretty-print the contents of the knowledge +// * base. 
We try to preserve user-provided prefixes found in files when loading +// * data. +// */ +// PrefixDeclarations prefixDeclarations; /** * Index structure that organises all facts by their predicate. @@ -172,13 +171,17 @@ public void deleteListener(KnowledgeBaseListener listener) { * @return true, if the knowledge base has changed. * @param statement */ - public boolean addStatement(Statement statement) { - Validate.notNull(statement, "Statement cannot be Null."); - if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { - this.statements.add(statement); + public void addStatement(Statement statement) { + if (doAddStatement(statement)) { - notifyListenersOnStatementAdded(statement); + } + notifyListenersOnStatementAdded(statement); + } + boolean doAddStatement(Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { + this.statements.add(statement); return true; } return false; @@ -190,16 +193,15 @@ public boolean addStatement(Statement statement) { * @param statements */ public void addStatements(Collection statements) { - final Set addedStatements = new HashSet<>(); + final List addedStatements = new ArrayList<>(); for (final Statement statement : statements) { - if (addStatement(statement)) { + if (doAddStatement(statement)) { addedStatements.add(statement); } } notifyListenersOnStatementsAdded(addedStatements); - } /** @@ -208,10 +210,10 @@ public void addStatements(Collection statements) { * @param statements */ public void addStatements(Statement... statements) { - final Set addedStatements = new HashSet<>(); - + final List addedStatements = new ArrayList<>(); + for (final Statement statement : statements) { - if (addStatement(statement)) { + if (doAddStatement(statement)) { addedStatements.add(statement); } } @@ -219,9 +221,11 @@ public void addStatements(Statement... statements) { notifyListenersOnStatementsAdded(addedStatements); } - private void notifyListenersOnStatementsAdded(final Set addedStatements) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementsAdded(addedStatements); + private void notifyListenersOnStatementsAdded(final List addedStatements) { + if (!addedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsAdded(addedStatements); + } } } @@ -286,7 +290,7 @@ void addFact(Fact fact) { * Returns all {@link Statement}s of this knowledge base. * * The result can be iterated over and will return statements in the original - * order. The collection is read-only and cannot be modified to add or delete + * order. The collection is read-only and cannot be modified to add or delete * statements. 
* * @return a collection of statements diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java index cae99a5a2..5639b54bc 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java @@ -1,33 +1,32 @@ package org.semanticweb.vlog4j.core.reasoner; -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Set; +import java.util.List; import org.semanticweb.vlog4j.core.model.api.Statement; +/** + * Listener to {@link KnowledgeBase} content change events. + * + * @author Irina Dragoste + * + */ public interface KnowledgeBaseListener { - + + /** + * Event triggered whenever a new statement is added to the associated knowledge + * base. + * + * @param statementAdded new statement added to the knowledge base. + */ void onStatementAdded(Statement statementAdded); - - void onStatementsAdded(Set statementsAdded); + + /** + * Event triggered whenever new statements are added to the associated knowledge + * base. + * + * @param statementsAdded a list of new statements that have been added to the + * knowledge base. 
+ */ + void onStatementsAdded(List statementsAdded); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 21b8a5706..07363b22d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -728,7 +728,7 @@ public CyclicityResult checkForCycles() { } @Override - public void onStatementsAdded(Set statementsAdded) { + public void onStatementsAdded(List statementsAdded) { // TODO more elaborate materialisation state handling // updateReasonerStateToKnowledgeBaseChanged(); // updateMaterialisationStateOnStatementsAdded(statementsAddedInvalidateMaterialisation(statementsAdded)); From 5423e5506710df9eb4e8256eebd187e1e37f8156 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 18:38:46 +0200 Subject: [PATCH 0180/1003] KnowledgeBaseListener license header --- .../core/reasoner/KnowledgeBaseListener.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java index 5639b54bc..a98ee08d1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.core.reasoner; +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.util.List; import org.semanticweb.vlog4j.core.model.api.Statement; From d557a84a8082d2f140d1f464cdce60485d757359 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 18:58:51 +0200 Subject: [PATCH 0181/1003] change QueryResultIterator api --- .../core/reasoner/QueryResultIterator.java | 24 +++++++++++++++++-- .../EmptyQueryResultIterator.java | 10 ++++---- .../VLogQueryResultIterator.java | 10 ++++---- .../reasoner/implementation/VLogReasoner.java | 23 +++++++++--------- .../implementation/AddDataSourceTest.java | 20 ++++++++-------- .../implementation/ReasonerStateTest.java | 6 ++--- .../vlog4j/examples/ExamplesUtils.java | 2 +- 7 files changed, 58 insertions(+), 37 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java index e0ad5217c..743497b26 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java @@ -24,9 +24,29 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; +/** + * Iterator for {@link QueryResult}s. + * + * @author Irina Dragoste + * + */ public interface QueryResultIterator extends Iterator, AutoCloseable { - public Correctness getMaterialisationState(); - + /** + * Returns the correctness of the query result. + *
+ * <ul>
+ * <li>If {@link Correctness#SOUND_AND_COMPLETE}, the query results are
+ * guaranteed to be correct.</li>
+ * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+ * to be sound, but may be incomplete.</li>
+ * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+ * results may be unsound.</li>
+ * </ul>
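+ * <p>
+ * For illustration, a caller might use this value to decide whether the
+ * returned answers can be used as they are; a minimal sketch (variable names
+ * are only illustrative):
+ *
+ * <pre>{@code
+ * if (answers.getCorrectness() != Correctness.SOUND_AND_COMPLETE) {
+ * 	// answers may be incomplete, or even unsound after knowledge base changes
+ * }
+ * }</pre>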
    + * + * @return query result correctness + */ + public Correctness getCorrectness(); + + @Override public void close(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java index d83bac081..1d1cd1575 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -32,10 +32,10 @@ */ public class EmptyQueryResultIterator implements QueryResultIterator { - final Correctness materialisationState; + final Correctness correctness; - public EmptyQueryResultIterator(Correctness materialisationState) { - this.materialisationState = materialisationState; + public EmptyQueryResultIterator(Correctness correctness) { + this.correctness = correctness; } @Override @@ -53,8 +53,8 @@ public QueryResult next() { return null; } - public Correctness getMaterialisationState() { - return this.materialisationState; + public Correctness getCorrectness() { + return this.correctness; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java index 648250fd9..043aea636 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java @@ -38,13 +38,12 @@ public class VLogQueryResultIterator implements QueryResultIterator { private final TermQueryResultIterator vLogTermQueryResultIterator; - private final Correctness materialisationState; + private final Correctness correctness; - // TODO add reasoningState to constructor public VLogQueryResultIterator(final TermQueryResultIterator termQueryResultIterator, final Correctness materialisationState) { this.vLogTermQueryResultIterator = termQueryResultIterator; - this.materialisationState = materialisationState; + this.correctness = materialisationState; } @Override @@ -63,8 +62,9 @@ public void close() { this.vLogTermQueryResultIterator.close(); } - public Correctness getMaterialisationState() { - return this.materialisationState; + @Override + public Correctness getCorrectness() { + return this.correctness; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 07363b22d..cd25a8b8f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -74,12 +74,7 @@ /** * Reasoner implementation using the VLog backend. * - * @TODO Due to automatic predicate renaming, it can happen that an EDB - * predicate cannot be queried after loading unless reasoning has already - * been invoked (since the auxiliary rule that imports the EDB facts to - * the "real" predicate must be used). 
This issue could be weakened by - * rewriting queries to (single-source) EDB predicates internally when in - * such a state, + * * * @author Markus Kroetzsch * @@ -308,8 +303,15 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { return this.ruleRewriteStrategy; } - //@Override - void load() throws IOException { + /* + * TODO Due to automatic predicate renaming, it can happen that an EDB predicate + * cannot be queried after loading unless reasoning has already been invoked + * (since the auxiliary rule that imports the EDB facts to the "real" predicate + * must be used). This issue could be weakened by rewriting queries to + * (single-source) EDB predicates internally when in such a state, + */ + // @Override + void load() throws IOException { validateNotClosed(); switch (this.reasonerState) { @@ -357,7 +359,7 @@ void loadKnowledgeBase() throws IOException { this.reasonerState = ReasonerState.KB_LOADED; // if there are no rules, then materialisation state is complete - this.correctness = rules.isEmpty()? Correctness.SOUND_AND_COMPLETE: Correctness.SOUND_BUT_INCOMPLETE; + this.correctness = rules.isEmpty() ? Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; LOGGER.info("Finished loading knowledge base."); } @@ -620,8 +622,7 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St private void logWarningOnCorrectness() { if (this.correctness != Correctness.SOUND_AND_COMPLETE) { - LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", - this.correctness); + LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index b387d4b80..720b80bf1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -76,12 +76,12 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeVariable("x")), false)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateParity1, Expressions.makeVariable("x")), false)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } @@ -102,12 +102,12 @@ public void testAddDataSourceBeforeLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); 
} try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } @@ -130,13 +130,13 @@ public void testAddDataSourceAfterLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } } @@ -158,13 +158,13 @@ public void testAddDataSourceAfterReasoning() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } } @@ -231,7 +231,7 @@ public void testAddMultipleDataSourcesForPredicate() throws IOException { expectedAnswers.addAll(csvFile_c_d_Content); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } @@ -256,7 +256,7 @@ public void testAddDataSourceAndFactsForPredicate() throws IOException { expectedAnswers.add(Arrays.asList(Expressions.makeConstant("a"))); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index f16fcd530..d81a714a1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -92,7 +92,7 @@ public void testAddFactsAndQuery() throws IOException { final Set> expectedAnswersC = new HashSet<>(Arrays.asList(Collections.singletonList(c))); try (final QueryResultIterator 
queryResult = reasoner.answerQuery(query, true)) { - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); final Set> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); assertEquals(expectedAnswersC, queryAnswersC); @@ -101,14 +101,14 @@ public void testAddFactsAndQuery() throws IOException { reasoner.getKnowledgeBase().addStatement(factPd); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); assertEquals(expectedAnswersC, QueryResultsUtils.collectQueryResults(queryResult)); } reasoner.load(); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); final Set> queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 03cc92e2e..55dbbf354 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -91,7 +91,7 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { answers.forEachRemaining(answer -> System.out.println(" - " + answer)); - System.out.println("Query answers are: " + answers.getMaterialisationState()); + System.out.println("Query answers are: " + answers.getCorrectness()); } System.out.println(); } From 61ecc40d3caf1e5fa0181d3b8f1bf5c9e911eb81 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 19:08:22 +0200 Subject: [PATCH 0182/1003] update Reasoner interface level javaodc --- .../vlog4j/core/reasoner/Reasoner.java | 38 ++++++------------- 1 file changed, 12 insertions(+), 26 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index f55b5e2a1..61b905664 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -2,9 +2,9 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.TermType; @@ -40,25 +40,19 @@ * reasoning.
 * Facts can be added to the knowledge base:
 * <ul>
- * <li>as in-memory Java objects ({@link #addFacts(Atom...)}</li>
- * <li>from a persistent data source
- * ({@link #addFactsFromDataSource(Predicate, DataSource)})</li>
+ * <li>as in-memory Java objects ({@link Fact})</li>
+ * <li>from a persistent data source ({@link DataSourceDeclaration})</li>
 * </ul>
    - * Note that facts with the same predicate cannot come from multiple sources - * (where a source can be a collection of in-memory {@link Atom} objects, or a - * {@link DataSource} .
    - * Rules added to the knowledge base ({@link #addRules(Rule...)}) can be - * re-written internally by VLog, using the corresponding set - * {@link RuleRewriteStrategy}.
    *
    - * Once adding facts and rules to the knowledge base has been completed, the - * knowledge base can be loaded into the reasoner. + * Rules added to the knowledge base can be re-written internally by + * VLog, using the corresponding set {@link RuleRewriteStrategy}.
    + *
    * - * The loaded reasoner can perform atomic queries on explicit facts - * before reasoning, and all implicit and explicit facts after calling - * {@link Reasoner#reason()}. Queries can provide an iterator for the results - * ({@link #answerQuery(Atom, boolean)}, or the results can be exported to a - * file ({@link #exportQueryAnswersToCsv(Atom, String, boolean)}).
    + * The loaded reasoner can perform atomic queries on explicit and + * implicit facts after calling {@link Reasoner#reason()}. Queries can provide + * an iterator for the results ({@link #answerQuery(Atom, boolean)}, or the + * results can be exported to a file + * ({@link #exportQueryAnswersToCsv(Atom, String, boolean)}).
    *
    * Reasoning with various {@link Algorithm}s is supported, that can lead * to different sets of inferred facts and different termination behavior. In @@ -67,14 +61,6 @@ * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it * leads to termination in more cases. To avoid non-termination, a reasoning * timeout can be set ({@link Reasoner#setReasoningTimeout(Integer)}).
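+ * <p>
+ * For illustration, a typical interaction might look like the following
+ * minimal sketch (how the {@code Reasoner} instance is obtained, e.g. via
+ * {@code new VLogReasoner(knowledgeBase)}, is an assumption here and may
+ * differ; exception handling is omitted):
+ *
+ * <pre>{@code
+ * try (Reasoner reasoner = new VLogReasoner(knowledgeBase)) {
+ * 	reasoner.reason(); // materialise all inferences
+ * 	PositiveLiteral query = Expressions.makePositiveLiteral("p", Expressions.makeVariable("x"));
+ * 	try (QueryResultIterator answers = reasoner.answerQuery(query, true)) {
+ * 		answers.forEachRemaining(answer -> System.out.println(answer));
+ * 		System.out.println("Query answers are: " + answers.getCorrectness());
+ * 	}
+ * }
+ * }</pre>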
    - * Incremental reasoning is not supported. To add more facts and rule to - * the knowledge base and reason again, the reasoner needs to be - * reset ({@link #resetReasoner()}) to the state of its knowledge base - * before loading. Then, more information can be added to the knowledge base, - * the reasoner can be loaded again, and querying and reasoning can be - * performed. - * - * @FIXME Update the outdated JavaDoc * * @author Irina Dragoste * From ae01347185e11807dc4b2c9d6009505ed5c6d9ff Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 19:17:56 +0200 Subject: [PATCH 0183/1003] update Reasoner javadoc --- .../vlog4j/core/reasoner/Reasoner.java | 31 ++++++------------- 1 file changed, 10 insertions(+), 21 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 61b905664..975e0bb3d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -184,11 +184,11 @@ public static Reasoner getInstance() { /** * Check the Joint Acyclicity (JA) property of loaded rules and EDB - * predicates of loaded facts. If a set of rules and EDB predicates is JA, then, + * predicates of loaded facts. If a set of rules and EDB predicates is JA, then, * for the given set of rules and any facts over the given EDB predicates, * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, implicitly, * the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will always - * terminate + * terminate. * * @return {@code true}, if the loaded set of rules is Joint Acyclic with * respect to the EDB predicates of loaded facts.
    @@ -199,7 +199,7 @@ public static Reasoner getInstance() { /** * Check the Restricted Joint Acyclicity (RJA) property of loaded rules * and EDB predicates of loaded facts. If a set of rules and EDB predicates is - * RJA, then, for the given set of rules and any facts over the given EDB + * RJA, then, for the given set of rules and any facts over the given EDB * predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE Restricted chase} * will always terminate * @@ -211,7 +211,7 @@ public static Reasoner getInstance() { /** * Check the Model-Faithful Acyclicity (MFA) property of loaded rules and - * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFA, + * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFA, * then, for the given set of rules and any facts over the given EDB predicates, * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, implicitly, * the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will always @@ -226,7 +226,7 @@ public static Reasoner getInstance() { /** * Check the Restricted Model-Faithful Acyclicity (RMFA) property of * loaded rules and EDB predicates of loaded facts. If a set of rules and EDB - * predicates is RMFA, then, for the given set of rules and any facts over the + * predicates is RMFA, then, for the given set of rules and any facts over the * given EDB predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE * Restricted chase} will always terminate. If a set of rules and EDB predicates * is MFA, then it is also JA. @@ -239,7 +239,7 @@ public static Reasoner getInstance() { /** * Check the Model-Faithful Cyclicity (MFC) property of loaded rules and - * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFC, + * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFC, * then there exists a set of facts over the given EDB predicates for which * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} algorithm is * guaranteed not to terminate for the loaded rules. If a set of rules and EDB @@ -253,8 +253,8 @@ public static Reasoner getInstance() { boolean isMFC(); /** - * Performs reasoning on the loaded knowledge base, depending on the set - * {@link Algorithm}. Reasoning implies extending the set of explicit facts in + * Performs materialisation on the reasoner {@link KnowledgeBase}, depending on the set + * {@link Algorithm}. Materialisation implies extending the set of explicit facts in * the knowledge base with implicit facts inferred by knowledge base rules.
    *
    * In some cases, reasoning with rules with existentially quantified variables @@ -263,21 +263,10 @@ public static Reasoner getInstance() { * leads to termination in more cases.
    * To avoid non-termination, a reasoning timeout can be set * ({@link Reasoner#setReasoningTimeout(Integer)}).
    - *
    - * Incremental reasoning is not supported. To add more facts and rule to - * the knowledge base and reason again, the reasoner needs to be - * reset ({@link #resetReasoner()}) to the state of its knowledge base - * before loading. Then, more information can be added to the knowledge base, - * the reasoner can be loaded again, and querying and reasoning can be - * performed. - * * @return *
      - *
    • the value returned by the previous {@link Reasoner#reason()} - * call, if successive reasoning is attempted before a - * {@link Reasoner#resetReasoner()}.
    • - *
    • {@code true}, if reasoning reached completion.
    • - *
    • {@code false}, if reasoning has been interrupted before + *
    • {@code true}, if materialisation reached completion.
    • + *
    • {@code false}, if materialisation has been interrupted before * completion.
    • *
    * @throws IOException if I/O exceptions occur during reasoning. From 64e8b83fd4d9f82d210a0db9f134e95e50db7070 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 19:26:29 +0200 Subject: [PATCH 0184/1003] update Reasoner javadoc --- .../vlog4j/core/reasoner/Reasoner.java | 53 +++++++++---------- .../reasoner/implementation/VLogReasoner.java | 12 +++-- 2 files changed, 34 insertions(+), 31 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 975e0bb3d..e584fb3e5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -184,11 +184,11 @@ public static Reasoner getInstance() { /** * Check the Joint Acyclicity (JA) property of loaded rules and EDB - * predicates of loaded facts. If a set of rules and EDB predicates is JA, then, - * for the given set of rules and any facts over the given EDB predicates, - * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, implicitly, - * the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will always - * terminate. + * predicates of loaded facts. If a set of rules and EDB predicates is + * JA, then, for the given set of rules and any facts over the given EDB + * predicates, reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, + * implicitly, the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will + * always terminate. * * @return {@code true}, if the loaded set of rules is Joint Acyclic with * respect to the EDB predicates of loaded facts.
    @@ -211,11 +211,11 @@ public static Reasoner getInstance() { /** * Check the Model-Faithful Acyclicity (MFA) property of loaded rules and - * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFA, - * then, for the given set of rules and any facts over the given EDB predicates, - * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, implicitly, - * the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will always - * terminate + * EDB predicates of loaded facts. If a set of rules and EDB predicates is + * MFA, then, for the given set of rules and any facts over the given EDB + * predicates, reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, + * implicitly, the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will + * always terminate * * @return {@code true}, if the loaded set of rules is Model-Faithful Acyclic * with respect to the EDB predicates of loaded facts.
    @@ -226,8 +226,8 @@ public static Reasoner getInstance() { /** * Check the Restricted Model-Faithful Acyclicity (RMFA) property of * loaded rules and EDB predicates of loaded facts. If a set of rules and EDB - * predicates is RMFA, then, for the given set of rules and any facts over the - * given EDB predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE + * predicates is RMFA, then, for the given set of rules and any facts + * over the given EDB predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE * Restricted chase} will always terminate. If a set of rules and EDB predicates * is MFA, then it is also JA. * @@ -239,12 +239,12 @@ public static Reasoner getInstance() { /** * Check the Model-Faithful Cyclicity (MFC) property of loaded rules and - * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFC, - * then there exists a set of facts over the given EDB predicates for which - * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} algorithm is - * guaranteed not to terminate for the loaded rules. If a set of rules and EDB - * predicates is RMFA, then it is also RJA. Therefore, if a set or rules and EDB - * predicates is MFC, it is not MFA, nor JA. + * EDB predicates of loaded facts. If a set of rules and EDB predicates is + * MFC, then there exists a set of facts over the given EDB predicates + * for which reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} algorithm + * is guaranteed not to terminate for the loaded rules. If a set of rules and + * EDB predicates is RMFA, then it is also RJA. Therefore, if a set or rules and + * EDB predicates is MFC, it is not MFA, nor JA. * * @return {@code true}, if the loaded set of rules is Model-Faithful Cyclic * with respect to the EDB predicates of loaded facts.
    @@ -253,9 +253,10 @@ public static Reasoner getInstance() { boolean isMFC(); /** - * Performs materialisation on the reasoner {@link KnowledgeBase}, depending on the set - * {@link Algorithm}. Materialisation implies extending the set of explicit facts in - * the knowledge base with implicit facts inferred by knowledge base rules.
    + * Performs materialisation on the reasoner {@link KnowledgeBase}, depending on + * the set {@link Algorithm}. Materialisation implies extending the set of + * explicit facts in the knowledge base with implicit facts inferred by + * knowledge base rules.
    *
    * In some cases, reasoning with rules with existentially quantified variables * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We @@ -263,6 +264,7 @@ public static Reasoner getInstance() { * leads to termination in more cases.
    * To avoid non-termination, a reasoning timeout can be set * ({@link Reasoner#setReasoningTimeout(Integer)}).
    + * * @return *
    * * - * @param query a {@link PositiveLiteral} representing the query to be - * answered. - * @param includeBlanks if {@code true}, {@link QueryResult}s containing terms - * of type {@link TermType#NAMED_NULL} (representing anonymous - * individuals introduced to satisfy rule existentially - * quantified variables) will be included. Otherwise, the - * answers will only contain the {@link QueryResult}s with - * terms of type {@link TermType#CONSTANT} (representing - * named individuals). + * @param query a {@link PositiveLiteral} representing the query to be + * answered. + * @param includeNulls if {@code true}, {@link QueryResult}s containing terms of + * type {@link TermType#NAMED_NULL} (representing anonymous + * individuals introduced to satisfy rule existentially + * quantified variables) will be included. Otherwise, the + * answers will only contain the {@link QueryResult}s with + * terms of type {@link TermType#CONSTANT} (representing + * named individuals). * @return QueryResultIterator that iterates over distinct answers to the query. * It also contains the {@link Correctness} of the query answers. */ - QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks); + QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** @@ -341,24 +341,24 @@ public static Reasoner getInstance() { * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer * fact at the same term position, and the {@link TermType#VARIABLE} terms of * the {@code query} are matched by terms in the fact, either named - * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The same - * variable name identifies the same term in the answer fact.
    + * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The + * same variable name identifies the same term in the answer fact.
    * A query can have multiple, distinct query answers. Each answers is written on * a separate line in the given file. * - * @param query a {@link PositiveLiteral} representing the query to be - * answered. - * @param csvFilePath path to a .csv file where the query - * answers will be written. Each line of the - * .csv file represents a query answer, and - * it will contain the fact term names as columns. - * @param includeBlanks if {@code true}, answers containing terms of type - * {@link TermType#NAMED_NULL} (representing anonymous - * individuals introduced to satisfy rule existentially - * quantified variables) will be included. Otherwise, the - * answers will only contain those with terms of type - * {@link TermType#CONSTANT} (representing named - * individuals). + * @param query a {@link PositiveLiteral} representing the query to be + * answered. + * @param csvFilePath path to a .csv file where the query answers + * will be written. Each line of the .csv file + * represents a query answer, and it will contain the fact + * term names as columns. + * @param includeNulls if {@code true}, answers containing terms of type + * {@link TermType#NAMED_NULL} (representing anonymous + * individuals introduced to satisfy rule existentially + * quantified variables) will be included. Otherwise, the + * answers will only contain those with terms of type + * {@link TermType#CONSTANT} (representing named + * individuals). * * @throws IOException if an I/O error occurs regarding given file * ({@code csvFilePath)}. @@ -389,7 +389,7 @@ public static Reasoner getInstance() { * * */ - Correctness exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeBlanks) + Correctness exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeNulls) throws IOException; /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java index ca892cc35..400943c75 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java @@ -90,8 +90,6 @@ public static String getVLogNameForConstant(Constant constant) { * Converts the string representation of a constant in VLog4j directly to the * name of a constant in VLog, without parsing it into a {@link Constant} first. * - * TODO Is this the appropriate location for this code? - * * @param vLog4jConstantName * @return VLog constant string */ @@ -124,7 +122,7 @@ public karmaresearch.vlog.Term visit(ExistentialVariable term) { } /** - * Transforms a Blank to a {@link karmaresearch.vlog.Term} with the same name + * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same name * and type {@link karmaresearch.vlog.Term.TermType#BLANK}. 
*/ @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java index aa4759c0c..27fd1a2d5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java @@ -23,6 +23,7 @@ import java.util.ArrayList; import java.util.List; +import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; @@ -81,23 +82,7 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { String name = vLogTerm.getName(); switch (vLogTerm.getTermType()) { case CONSTANT: - if (name.charAt(0) == '<' && name.charAt(name.length() - 1) == '>') { // strip <> off IRIs - return new AbstractConstantImpl(name.substring(1, name.length() - 1)); - } else if (name.charAt(0) == '"') { - if (name.charAt(name.length() - 1) == '>') { - int startTypeIdx = name.lastIndexOf('<', name.length() - 2); - String datatype = name.substring(startTypeIdx + 1, name.length() - 1); - String lexicalValue = name.substring(1, startTypeIdx - 3); - return new DatatypeConstantImpl(lexicalValue, datatype); - } else { - int startTypeIdx = name.lastIndexOf('@', name.length() - 2); - String languageTag = name.substring(startTypeIdx + 1, name.length()); - String string = name.substring(1, startTypeIdx - 1); - return new LanguageStringConstantImpl(string, languageTag); - } - } else { - return new AbstractConstantImpl(name); - } + return toConstant(name); case BLANK: return new NamedNullImpl(name); case VARIABLE: @@ -108,4 +93,31 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { } } + /** + * Creates a {@link Constant} from the given VLog constant name. 
+ * + * @param vLogConstantName the string name used by VLog + * @return {@link Constant} object + */ + private static Constant toConstant(String vLogConstantName) { + if (vLogConstantName.charAt(0) == '<' && vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { + // strip <> off of IRIs + return new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1)); + } else if (vLogConstantName.charAt(0) == '"') { + if (vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { + int startTypeIdx = vLogConstantName.lastIndexOf('<', vLogConstantName.length() - 2); + String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1); + String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3); + return new DatatypeConstantImpl(lexicalValue, datatype); + } else { + int startTypeIdx = vLogConstantName.lastIndexOf('@', vLogConstantName.length() - 2); + String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); + String string = vLogConstantName.substring(1, startTypeIdx - 1); + return new LanguageStringConstantImpl(string, languageTag); + } + } else { + return new AbstractConstantImpl(vLogConstantName); + } + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java index 0b46a1e38..cb787b901 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java @@ -50,9 +50,9 @@ public void testGettersLiterals() { final Literal positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); final NegativeLiteral negativeLiteral2 = Expressions.makeNegativeLiteral("p", y, x); final Literal positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); - final Literal negativeLiteral4 = Expressions.makePositiveLiteral("q", y, d, z); + final Literal positiveLiteral4 = Expressions.makePositiveLiteral("q", y, d, z); final List literalList = Arrays.asList(positiveLiteral1, negativeLiteral2, positiveLiteral3, - negativeLiteral4); + positiveLiteral4); final Conjunction conjunction = new ConjunctionImpl<>(literalList); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java index 8c3a7d8c1..c358cecdf 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java @@ -46,9 +46,6 @@ public void testGetters() { final PositiveLiteral atomP = Expressions.makePositiveLiteral("p", x, c, d, y); final PositiveLiteral atomQ = Expressions.makePositiveLiteral("q", c, d); -// final Set variables = Sets.newSet(x, y); -// final Set constants = Sets.newSet(c, d); - assertEquals("p", atomP.getPredicate().getName()); assertEquals(atomP.getArguments().size(), atomP.getPredicate().getArity()); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index b84ef8324..3f3ecf02f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -91,7 +91,6 @@ public void testEquals() { 
final Rule rule4 = new RuleImpl(bodyPositiveLiterals, bodyLiterals); final Rule rule5 = new RuleImpl(bodyPositiveLiterals, bodyLiterals); -// assertNotEquals(rule3, rule1); assertNotEquals(rule4, rule1); assertNotEquals(rule5, rule1); assertFalse(rule1.equals(null)); diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java index ad67c6ba7..bec53d7fb 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java +++ b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java @@ -60,7 +60,10 @@ private GraalToVLog4JModelConverter() { * Converts a {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} into a * {@link PositiveLiteral VLog4J PositiveLiteral}. * - * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} + * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom + * Graal Atom} + * @param existentialVariables set of variables that are existentially + * quantified * @return A {@link PositiveLiteral VLog4J PositiveLiteral} */ public static PositiveLiteral convertAtom(final fr.lirmm.graphik.graal.api.core.Atom atom, @@ -122,7 +125,9 @@ public static List convertAtomsToFacts(final List convertAtomSet(final AtomSet atomSet, @@ -237,7 +242,9 @@ public static List convertRules(final List replaceTerm(term, oldTerm, newTerm)).collect(Collectors.toList())); } + return new PositiveLiteralImpl(literal.getPredicate(), + literal.getTerms().map(term -> replaceTerm(term, oldTerm, newTerm)).collect(Collectors.toList())); } /** @@ -208,7 +207,7 @@ void startAxiomConversion() { /** * Processes an OWL class inclusion axiom with the two class expressions as - * give, and adds the resulting rules. The method proceeds by first converting + * given, and adds the resulting rules. The method proceeds by first converting * the superclass, then converting the subclass with the same body and head atom * buffers, and finally creating a rule from the collected body and head. 
The * conversions may lead to auxiliary rules being created during processing, so From 23cc89fc0281e01188cc0209d7f96145217ae4a1 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Oct 2019 15:31:03 +0200 Subject: [PATCH 0326/1003] simplified code as per review --- .../owlapi/OwlAxiomToRulesConverter.java | 38 ++++++++++--------- 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java index f15f57abc..7d2294220 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java @@ -129,32 +129,34 @@ Variable getFreshExistentialVariable() { * @param converter */ void addRule(final AbstractClassToRuleConverter converter) { - if (converter.isTautology()) { - return; - } - Conjunction headConjunction; - if (converter.head.isFalseOrEmpty()) { - headConjunction = new ConjunctionImpl<>( - Arrays.asList(OwlToRulesConversionHelper.getBottom(converter.mainTerm))); - } else { - headConjunction = new ConjunctionImpl<>(converter.head.getConjuncts()); - } + if (!converter.isTautology()) { + final Conjunction headConjunction = this.constructHeadConjunction(converter); - Conjunction bodyConjunction; - if (converter.body.isTrueOrEmpty()) { - bodyConjunction = new ConjunctionImpl<>( - Arrays.asList(OwlToRulesConversionHelper.getTop(converter.mainTerm))); - if (headConjunction.getVariables().count() == 0) { + if (converter.body.isTrueOrEmpty() && (headConjunction.getVariables().count() == 0)) { for (final PositiveLiteral conjunct : headConjunction.getLiterals()) { this.facts.add(new FactImpl(conjunct.getPredicate(), conjunct.getArguments())); } - return; + } else { + final Conjunction bodyConjunction = this.constructBodyConjunction(converter); + this.rules.add(Expressions.makePositiveLiteralsRule(headConjunction, bodyConjunction)); } + } + } + + private Conjunction constructBodyConjunction(final AbstractClassToRuleConverter converter) { + if (converter.body.isTrueOrEmpty()) { + return new ConjunctionImpl<>(Arrays.asList(OwlToRulesConversionHelper.getTop(converter.mainTerm))); } else { - bodyConjunction = new ConjunctionImpl<>(converter.body.getConjuncts()); + return new ConjunctionImpl<>(converter.body.getConjuncts()); } + } - this.rules.add(Expressions.makePositiveLiteralsRule(headConjunction, bodyConjunction)); + private Conjunction constructHeadConjunction(final AbstractClassToRuleConverter converter) { + if (converter.head.isFalseOrEmpty()) { + return new ConjunctionImpl<>(Arrays.asList(OwlToRulesConversionHelper.getBottom(converter.mainTerm))); + } else { + return new ConjunctionImpl<>(converter.head.getConjuncts()); + } } Term replaceTerm(Term term, Term oldTerm, Term newTerm) { From 41258c476776b99c707274c67dc3385535cdd80b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Oct 2019 15:40:41 +0200 Subject: [PATCH 0327/1003] Nicer stream merging as per review --- .../core/model/implementation/ConjunctionImpl.java | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index 7c5a604ed..726cefc9a 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -55,18 +55,9 @@ public List getLiterals() { return Collections.unmodifiableList(this.literals); } - @SuppressWarnings("resource") @Override public Stream getTerms() { - Stream result = null; - for (final T literal : this.literals) { - if (result == null) { - result = literal.getTerms(); - } else { - result = Stream.concat(result, literal.getTerms()); - } - } - return result.distinct(); + return this.literals.stream().flatMap(l -> l.getTerms()).distinct(); } @Override From 091344f69f650436e60d7ad571fcd67ab1a4ec7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Fri, 25 Oct 2019 15:56:53 +0200 Subject: [PATCH 0328/1003] Notes on data model changes --- RELEASE-NOTES.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 6a396b8e8..a94bb0447 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,6 +1,23 @@ VLog4j Release Notes ==================== +VLog4j v0.5.0 +------------- + +Breaking changes: +* The data model for rules has been refined and changed: + * Instead of Constant, specific types of constants are used to capture abstract and data values + * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification + * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes + * Methods to access terms now use Java Streams and are unified across syntactic objects + +New features: +* ... + +Other improvements: +* Data model is better aligned with syntax supported by parser + + VLog4j v0.4.0 ------------- From bbfdc960c6f16a5c15744abd9d64b124d24093d2 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Oct 2019 17:33:40 +0200 Subject: [PATCH 0329/1003] Quick and dirty fix for #128 --- RELEASE-NOTES.md | 3 +++ .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 7 +++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index a94bb0447..049a8e72e 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -16,6 +16,9 @@ New features: Other improvements: * Data model is better aligned with syntax supported by parser + +Bugfixes: +* Acyclicity checks work again without calling reason() first (issue #128) VLog4j v0.4.0 diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 8c295cdac..d355e17f2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -705,8 +705,11 @@ public boolean isMFC() { private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, - "Checking rules acyclicity is not allowed before loading!"); + try { + load(); + } catch (IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + throw new RuntimeException(e); + } } CyclicCheckResult checkCyclic; From 012a8ca56441c039dcf53bf9402387e9cd828474 Mon Sep 17 00:00:00 2001 From: Ali Elhalawati Date: Sun, 27 Oct 2019 15:21:13 +0100 Subject: [PATCH 0330/1003] Added
Serializer Class with toString methods --- .../core/model/implementation/Serializer.java | 72 +++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java new file mode 100644 index 000000000..487ffbf1d --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -0,0 +1,72 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; + +/** + * Simple class implementation of various toString methods to ensure the correct + * parsable string output of the different Data models. 
+ * + * @author Ali Elhalawati + * + */ +public class Serializer { + + public Serializer() { + + } + + public static String getConjunctionString(Conjunction conjunction) { + return conjunction.toString(); + } + + public static String getConstantString(Constant constant) { + return constant.toString(); + } + + public static String getPredicateString(Predicate predicate) { + return predicate.toString(); + } + + public static String getVariableString(Variable variable) { + return variable.toString(); + } + + public static String getRuleString(Rule rule) { + return rule.toString() + " ."; + } + + public static String getLiteralString(Literal literal) { + return literal.toString(); + } + + public static String getFactString(Fact fact) { + return fact.toString() + "."; + } + +} From a6a42635d3731520dbc22d83dbc23338a8522242 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 28 Oct 2019 16:08:47 +0100 Subject: [PATCH 0331/1003] fix tostring in Serializer and datamodels --- LICENSE.txt | 402 +++++++++--------- .../implementation/AbstractConstantImpl.java | 2 +- .../implementation/AbstractLiteralImpl.java | 17 +- .../model/implementation/ConjunctionImpl.java | 2 +- .../DataSourceDeclarationImpl.java | 2 +- .../implementation/DatatypeConstantImpl.java | 2 +- .../ExistentialVariableImpl.java | 2 +- .../LanguageStringConstantImpl.java | 2 +- .../model/implementation/NamedNullImpl.java | 2 +- .../core/model/implementation/RuleImpl.java | 2 +- .../core/model/implementation/Serializer.java | 101 +++++ .../implementation/UniversalVariableImpl.java | 2 +- vlog4j-examples/LICENSE.txt | 402 +++++++++--------- vlog4j-graal/LICENSE.txt | 402 +++++++++--------- vlog4j-parser/LICENSE.txt | 402 +++++++++--------- 15 files changed, 915 insertions(+), 829 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java diff --git a/LICENSE.txt b/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
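As a reading aid for the Serializer introduced in PATCH 0330 and reworked in this patch, the following minimal sketch illustrates the kind of parser-friendly toString() output that the delegating implementations further below are expected to produce. It is not part of the patch series: the Expressions.makePositiveLiteral factory call and the *Impl constructors are taken from the diffs above, while the class name, the example IRI, the single-argument variable constructor, and the import path of Expressions are assumptions made for illustration only.

import org.semanticweb.vlog4j.core.model.api.Constant;
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Variable;
import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl;
import org.semanticweb.vlog4j.core.model.implementation.Expressions; // package assumed
import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl;

public class SerializerOutputSketch { // hypothetical example class, not part of the patch
	public static void main(String[] args) {
		// Universal variables are expected to print with a leading '?' (see getUniversalVarString).
		Variable x = new UniversalVariableImpl("X"); // single-argument constructor assumed
		// Abstract constants print as their plain name (see getConstantString).
		Constant c = new AbstractConstantImpl("http://example.org/c");
		// Literals are rendered as predicate(term, ..., term) by getLiteralString.
		PositiveLiteral literal = Expressions.makePositiveLiteral("p", x, c);
		// Expected output: p(?X, http://example.org/c)
		System.out.println(literal);
	}
}
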
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java index 84cccf81f..86e97a061 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java @@ -47,6 +47,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return this.getName(); + return Serializer.getConstantString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java index c5b43cfb1..80bd418e6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java @@ -94,22 +94,7 @@ public boolean equals(final Object obj) { @Override public String toString() { - final StringBuilder stringBuilder = new StringBuilder(""); - if (this.isNegated()) { - stringBuilder.append("~"); - } - stringBuilder.append(this.getPredicate().getName()).append("("); - boolean first = true; - for (final Term term : this.getArguments()) { - if (first) { - first = false; - } else { - stringBuilder.append(", "); - } - stringBuilder.append(term); - } - stringBuilder.append(")"); - return stringBuilder.toString(); + return Serializer.getLiteralString(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index 726cefc9a..69df24198 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -95,7 +95,7 @@ public String toString() { } else { stringBuilder.append(", "); } - stringBuilder.append(literal.toString()); + stringBuilder.append(Serializer.getLiteralString((AbstractLiteralImpl) literal)); } return stringBuilder.toString(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java index 150475b47..217538610 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java @@ -85,7 +85,7 @@ public T accept(StatementVisitor statementVisitor) { @Override public String toString() { - return "@source " + this.predicate.toString() + "(" + this.predicate.getArity() + ") : " + return "@source " + Serializer.getPredicateString(this.predicate) + "(" + this.predicate.getArity() + ") : " + this.dataSource.toConfigString() + " ."; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java index cb9d1a82f..907758747 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java @@ -60,7 +60,7 @@ public String getLexicalValue() { @Override public String toString() { - return this.getName(); + return Serializer.getDatatypeConstantString(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java index ff0d893ca..a570bd615 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java @@ -46,6 +46,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return "!" + this.getName(); + return Serializer.getExistentialVarString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java index ea99774c2..084143187 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java @@ -84,7 +84,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return getName(); + return Serializer.getLanguageConstantString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java index c676bd2f3..d6b1d2e9b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java @@ -48,6 +48,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return "_" + this.getName(); + return Serializer.getNamedNullString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java index 135ded2d6..c83e79a30 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java @@ -105,7 +105,7 @@ public boolean equals(final Object obj) { @Override public String toString() { - return this.head + " :- " + this.body; + return Serializer.getRuleString(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java new file mode 100644 index 000000000..9402fe467 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -0,0 +1,101 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +import java.util.List; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.Variable; + +/** + * Simple class implementation of various toString methods to ensure the correct + * parsable string output of the different Data models. + * + * @author Ali Elhalawati + * + */ +public class Serializer { + + public Serializer() { + + } + + public static String getRuleString(RuleImpl rule) { + return rule.getHead() + " :- " + rule.getBody() + "."; + } + + public static String getLiteralString(AbstractLiteralImpl literal) { + final StringBuilder stringBuilder = new StringBuilder(""); + if (literal.isNegated()) { + stringBuilder.append("~"); + } + stringBuilder.append(literal.getPredicate().getName()).append("("); + boolean first = true; + for (final Term term : literal.getArguments()) { + if (first) { + first = false; + } else { + stringBuilder.append(", "); + } + stringBuilder.append(term); + } + stringBuilder.append(")"); + return stringBuilder.toString(); + } + + public static String getFactString(FactImpl fact) { + return fact.toString() + "."; + } + + public static String getConstantString(AbstractConstantImpl constant) { + return constant.getName(); + } + + public static String getExistentialVarString(ExistentialVariableImpl existentialvariable) { + return "!" + existentialvariable.getName(); + } + + public static String getUniversalVarString(UniversalVariableImpl universalvariable) { + return "?" + universalvariable.getName(); + } + + public static String getDatatypeConstantString(DatatypeConstantImpl datatypeconstant) { + return datatypeconstant.getName(); + } + + public static String getLanguageConstantString(LanguageStringConstantImpl languagestringconstant) { + return languagestringconstant.getName(); + } + + public static String getNamedNullString(NamedNullImpl namednull) { + return "_" + namednull.getName(); + } + public static String getPredicateString(Predicate predicate) { + return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java index eaec2eb74..838eab7f3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java @@ -46,6 +46,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return "?"
+ this.getName(); + return Serializer.getUniversalVarString(this); } } diff --git a/vlog4j-examples/LICENSE.txt b/vlog4j-examples/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/vlog4j-examples/LICENSE.txt +++ b/vlog4j-examples/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlog4j-graal/LICENSE.txt b/vlog4j-graal/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/vlog4j-graal/LICENSE.txt +++ b/vlog4j-graal/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlog4j-parser/LICENSE.txt b/vlog4j-parser/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/vlog4j-parser/LICENSE.txt +++ b/vlog4j-parser/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
+ Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. From cc3f1b565a0165118576cf0644451fd7bd865a18 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 28 Oct 2019 16:58:17 +0100 Subject: [PATCH 0332/1003] test trial --- .../vlog4j/core/model/implementation/Serializer.java | 4 ---- .../java/org/semanticweb/vlog4j/core/model/RuleImplTest.java | 2 ++ 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 9402fe467..c4b6fc118 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -40,10 +40,6 @@ */ public class Serializer { - public Serializer() { - - } - public static String getRuleString(RuleImpl rule) { return rule.getHead() + " :- " + rule.getBody() + "."; } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index 3f3ecf02f..be3cee765 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -78,6 +78,7 @@ public void testEquals() { final Rule rule8 = Expressions.makePositiveLiteralsRule(headPositiveLiterals, bodyPositiveLiterals); assertEquals(rule1, rule1); + assertEquals(rule1.toString(),"q(?X, !Y) :- p(?X, c), p(?X, ?Z)."); assertEquals(rule2, rule1); assertEquals(rule2.hashCode(), rule1.hashCode()); @@ -95,6 +96,7 @@ public void testEquals() { assertNotEquals(rule5, rule1); assertFalse(rule1.equals(null)); assertFalse(rule1.equals(c)); + } @Test(expected = IllegalArgumentException.class) From f0a149e91b1049cb03a56c43aa3fc4b655e02c75 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 30 Oct 2019 19:16:43 +0100 Subject: [PATCH 0333/1003] update helper classes in client; update unit test for client --- .../client/picocli/PrintQueryResults.java | 31 +-- .../org/vlog4j/client/picocli/SaveModel.java | 39 +++- .../client/picocli/SaveQueryResults.java | 53 +++-- .../vlog4j/client/picocli/VLog4jClient.java | 2 +- .../picocli/VLog4jClientMaterialize.java | 51 +++-- .../client/picocli/PrintQueryResultsTest.java | 78 +++---- .../vlog4j/client/picocli/SaveModelTest.java | 190 ++++++++--------- .../client/picocli/SaveQueryResultsTest.java | 192 ++++++++---------- 8 files changed, 314 insertions(+), 322 deletions(-) diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java index dbf8039e8..cd32d547a 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java @@ -1,7 +1,5 @@ package org.vlog4j.client.picocli; -import javax.naming.ConfigurationException; - /*- * #%L * VLog4j Client @@ -32,6 +30,8 @@ */ public class PrintQueryResults { + static final String 
configurationErrorMessage = "Configuration Error: @code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. Set only one to true.\n Exiting the program."; + /** * If true, Vlog4jClient will print the size of the query result. Mutually * exclusive with {@code --print-complete-query-result} @@ -50,37 +50,42 @@ public class PrintQueryResults { @Option(names = "--print-complete-query-result", description = "Boolean. If true, Vlog4jClient will print the query result in stdout. False by default.") private boolean complete = false; + public PrintQueryResults() { + } + + public PrintQueryResults(boolean sizeOnly, boolean complete) { + this.sizeOnly = sizeOnly; + this.complete = complete; + } + /** * Check correct configuration of the class. @code{--print-query-result-size} * and @code{--print-query-result} are mutually exclusive. * - * @throws ConfigurationException + * @return @code{true} if configuration is valid. */ - public void validate() throws ConfigurationException { - String error = "@code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. Set only one to true."; - if (sizeOnly && complete) { - throw new ConfigurationException(error); - } + protected boolean isValid() { + return !sizeOnly || !complete; } - public void printConfiguration() { + protected void printConfiguration() { System.out.println(" --print-query-result-size: " + sizeOnly); System.out.println(" --print-complete-query-result: " + complete); } - public boolean isSizeOnly() { + protected boolean isSizeOnly() { return sizeOnly; } - public void setSizeOnly(boolean sizeOnly) { + protected void setSizeOnly(boolean sizeOnly) { this.sizeOnly = sizeOnly; } - public boolean isComplete() { + protected boolean isComplete() { return complete; } - public void setComplete(boolean complete) { + protected void setComplete(boolean complete) { this.complete = complete; } } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java index 1ec6d0bac..b74a9b43a 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java @@ -22,8 +22,6 @@ import java.io.File; -import javax.naming.ConfigurationException; - import picocli.CommandLine.Option; /** @@ -34,6 +32,9 @@ */ public class SaveModel { + static final String configurationErrorMessage = "Configuration Error: If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required.\nExiting the program."; + static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. Please check the path.\nExiting the program."; + /** * If true, Vlog4jClient will save the model in {@code --output-model-directory} * @@ -50,25 +51,43 @@ public class SaveModel { @Option(names = "--output-model-directory", description = "Directory to store the model. Used only if --store-model is true. \"model\" by default.") private String outputModelDirectory = "model"; + public SaveModel() { + } + + public SaveModel(boolean saveModel, String outputDir) { + this.saveModel = saveModel; + this.outputModelDirectory = outputDir; + } + /** * Check correct configuration of the class. If @code{--save-model} is true, * then a non-empty @code{--output-model-directory} is required. * - * @throws ConfigurationException + * @return @code{true} if configuration is valid. 
*/ - public void validate() throws ConfigurationException { - String error_message = "If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required."; - if (saveModel && (outputModelDirectory == null || outputModelDirectory.isEmpty())) { - throw new ConfigurationException(error_message); - } + protected boolean isConfigurationValid() { + return !saveModel || (outputModelDirectory != null && !outputModelDirectory.isEmpty()); + } + + /** + * Check that the path to store the model is either non-existing or a directory. + * + * @return @code{true} if conditions are satisfied. + */ + protected boolean isDirectoryValid() { + File file = new File(outputModelDirectory); + return !file.exists() || file.isDirectory(); } /** * Create directory to store the model */ - public void prepare() { + public void mkdir() { if (saveModel) { - new File(outputModelDirectory).mkdirs(); + File file = new File(outputModelDirectory); + if (!file.exists()) { + file.mkdirs(); + } } } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java index 0bebbd58b..a0c20f91f 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java @@ -22,8 +22,6 @@ import java.io.File; -import javax.naming.ConfigurationException; - import picocli.CommandLine.Option; /** @@ -34,6 +32,9 @@ */ public class SaveQueryResults { + static final String configurationErrorMessage = "Configuration Error: If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required.\nExiting the program."; + static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. Please check the path.\nExiting the program."; + /** * If true, Vlog4jClient will save the query result in * {@code --output-query-result-directory} @@ -52,46 +53,66 @@ public class SaveQueryResults { @Option(names = "--output-query-result-directory", description = "Directory to store the model. Used only if --save-query-results is true. \"query-results\" by default.") private String outputQueryResultDirectory = "query-results"; + public SaveQueryResults() { + } + + public SaveQueryResults(boolean saveResults, String outputDir) { + this.saveResults = saveResults; + this.outputQueryResultDirectory = outputDir; + } + /** * Check correct configuration of the class. If @code{--save-query-results} is * true, then a non-empty @code{--output-query-result-directory} is required. * - * @throws ConfigurationException + * @return @code{true} if configuration is valid. */ - public void validate() throws ConfigurationException { - String error_message = "If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required."; - if (saveResults && (outputQueryResultDirectory == null || outputQueryResultDirectory.isEmpty())) { - throw new ConfigurationException(error_message); - } + protected boolean isConfigurationValid() { + return !saveResults || (outputQueryResultDirectory != null && !outputQueryResultDirectory.isEmpty()); + } + + /** + * Check that the path to store the query results is either non-existing or a + * directory. + * + * @return @code{true} if conditions are satisfied. 
+ */ + protected boolean isDirectoryValid() { + File file = new File(outputQueryResultDirectory); + return !file.exists() || file.isDirectory(); } /** - * Create directory to store query results + * Create directory to store query results if not present. It assumes that + * configuration and directory are valid. */ - public void prepare() { + protected void mkdir() { if (saveResults) { - new File(outputQueryResultDirectory).mkdirs(); + File file = new File(outputQueryResultDirectory); + if (!file.exists()) { + file.mkdirs(); + } } } - public void printConfiguration() { + protected void printConfiguration() { System.out.println(" --save-query-results: " + saveResults); System.out.println(" --output-query-result-directory: " + outputQueryResultDirectory); } - public boolean isSaveResults() { + protected boolean isSaveResults() { return saveResults; } - public void setSaveResults(boolean saveResults) { + protected void setSaveResults(boolean saveResults) { this.saveResults = saveResults; } - public String getOutputQueryResultDirectory() { + protected String getOutputQueryResultDirectory() { return outputQueryResultDirectory; } - public void setOutputQueryResultDirectory(String outputQueryResultDirectory) { + protected void setOutputQueryResultDirectory(String outputQueryResultDirectory) { this.outputQueryResultDirectory = outputQueryResultDirectory; } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java index c317e5ebe..f35f01fb6 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java @@ -31,7 +31,7 @@ * */ @Command(name = "java -jar VLog4jClient.jar", description = "VLog4jClient: A command line client of VLog4j.", subcommands = { - VLog4jClientMaterialize.class}) + VLog4jClientMaterialize.class }) public class VLog4jClient implements Runnable { public static void main(String[] args) { diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java index c77e8050c..75973312f 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.List; -import javax.naming.ConfigurationException; - import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -54,8 +52,7 @@ public class VLog4jClientMaterialize implements Runnable { private final KnowledgeBase kb = new KnowledgeBase(); private final List queries = new ArrayList<>(); - // TODO add link to rls syntax - @Option(names = "--rule-file", description = "Rule file(s) in rls syntax", required = true) + @Option(names = "--rule-file", description = "Rule file(s) in {@link https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar} syntax", required = true) private List ruleFiles = new ArrayList<>(); // TODO @@ -115,15 +112,22 @@ public void run() { } private void validateConfiguration() { - try { - printQueryResults.validate(); - saveQueryResults.validate(); - // TODO saveModel.validate(); - } catch (ConfigurationException e) { - System.err.println("Configuration Error: " + e.getMessage()); - 
System.err.println("Exiting the program."); - System.exit(1); + if (!printQueryResults.isValid()) { + printMessageAndExit(PrintQueryResults.configurationErrorMessage); + } + if (!saveQueryResults.isConfigurationValid()) { + printMessageAndExit(SaveQueryResults.configurationErrorMessage); + } + if (saveQueryResults.isSaveResults() && !saveQueryResults.isDirectoryValid()) { + printMessageAndExit(SaveQueryResults.wrongDirectoryErrorMessage); } + // TODO + // if (!saveModel.isConfigurationValid()) { + // printMessageAndExit(SaveModel.configurationErrorMessage); + // } + // if (saveModel.isSaveResults() && !saveModel.isDirectoryValid()) { + // printMessageAndExit(SaveModel.wrongDirectoryErrorMessage); + // } } private void configureRules() { @@ -131,13 +135,11 @@ private void configureRules() { try { RuleParser.parseInto(kb, new FileInputStream(ruleFile)); } catch (FileNotFoundException e1) { - System.err.println("File not found: " + ruleFile + ". " + e1.getMessage()); - System.err.println("Exiting the program."); - System.exit(1); + throw new RuntimeException( + "File not found: " + ruleFile + ". " + e1.getMessage() + "\nExiting the program."); } catch (ParsingException e2) { - System.err.println("Failed to parse rule file: " + ruleFile + ". " + e2.getMessage()); - System.err.println("Exiting the program."); - System.exit(1); + throw new RuntimeException( + "Failed to parse rule file: " + ruleFile + ". " + e2.getMessage() + "\nExiting the program."); } } } @@ -171,14 +173,14 @@ private void materialize(Reasoner reasoner) { try { reasoner.reason(); } catch (IOException e) { - System.err.println("Something went wrong. Please check the log file." + e.getMessage()); - System.err.println("Exiting the program."); - System.exit(1); + throw new RuntimeException( + "Something went wrong. Please check the log file." 
+ e.getMessage() + "\nExiting the program."); } } // TODO private void saveModel() {...} + private void answerQueries(Reasoner reasoner) { if (!queries.isEmpty()) { System.out.println("Answering queries ..."); @@ -222,6 +224,7 @@ private void printConfiguration() { } private void doSaveQueryResults(Reasoner reasoner, PositiveLiteral query) { + saveQueryResults.mkdir(); try { reasoner.exportQueryAnswersToCsv(query, queryOputputPath(query), true); } catch (IOException e) { @@ -238,4 +241,10 @@ private void doPrintResults(Reasoner reasoner, PositiveLiteral query) { private String queryOputputPath(PositiveLiteral query) { return saveQueryResults.getOutputQueryResultDirectory() + "/" + query + ".csv"; } + + private void printMessageAndExit(String message) { + System.err.println(message); + System.exit(1); + } + } diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java index 65f07f306..686fe7506 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java @@ -21,98 +21,84 @@ */ import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import org.junit.Test; import org.vlog4j.client.picocli.PrintQueryResults; -import javax.naming.ConfigurationException; public class PrintQueryResultsTest { String outputConfigurationBase = " --print-query-result-size: %b\n --print-complete-query-result: %b\n"; + private final PrintQueryResults sizeTrueCompleteTrue = new PrintQueryResults(true, true); + private final PrintQueryResults sizeTrueCompleteFalse = new PrintQueryResults(true, false); + private final PrintQueryResults sizeFalseCompleteTrue = new PrintQueryResults(false, true); + private final PrintQueryResults sizeFalseCompleteFalse = new PrintQueryResults(false, false); + @Test - public void validate_sizeTrueCompleteFalse_valid() throws ConfigurationException { + public void isValid_sizeTrueCompleteFalse_valid() { // default configuration - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(true); - prq.setComplete(false); - prq.validate(); + assertTrue(sizeTrueCompleteFalse.isValid()); } @Test - public void validate_sizeFalseCompleteTrue_valid() throws ConfigurationException { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(false); - prq.setComplete(true); - prq.validate(); + public void isValid_sizeFalseCompleteTrue_valid() { + assertTrue(sizeFalseCompleteTrue.isValid()); } - @Test(expected = ConfigurationException.class) - public void validate_sizeTrueCompleteTrue_notValid() throws ConfigurationException { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(true); - prq.setComplete(true); - prq.validate(); + @Test + public void isValid_sizeTrueCompleteTrue_notValid() { + assertFalse(sizeTrueCompleteTrue.isValid()); } @Test - public void validate_sizeFalseCompleteFalse_valid() throws ConfigurationException { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(false); - prq.setComplete(false); - prq.validate(); + public void isValid_sizeFalseCompleteFalse_valid() { + assertTrue(sizeFalseCompleteFalse.isValid()); } @Test - public void printConfiguration_sizeTrueCompleteFalse_valid() { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(true); 
- prq.setComplete(false); - assertEquals(String.format(outputConfigurationBase, true, false), captureOutputPrintConfiguration(prq)); + public void printConfiguration_sizeTrueCompleteFalse() { + assertEquals(String.format(outputConfigurationBase, true, false), + captureOutputPrintConfiguration(sizeTrueCompleteFalse)); } @Test - public void printConfiguration_sizeFalseCompleteTrue_valid() { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(false); - prq.setComplete(true); - assertEquals(String.format(outputConfigurationBase, false, true), captureOutputPrintConfiguration(prq)); + public void printConfiguration_sizeFalseCompleteTrue() { + assertEquals(String.format(outputConfigurationBase, false, true), + captureOutputPrintConfiguration(sizeFalseCompleteTrue)); } @Test - public void printConfiguration_sizeTrueCompleteTrue_notValid() { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(true); - prq.setComplete(true); - assertEquals(String.format(outputConfigurationBase, true, true), captureOutputPrintConfiguration(prq)); + public void printConfiguration_sizeTrueCompleteTrue() { + assertEquals(String.format(outputConfigurationBase, true, true), + captureOutputPrintConfiguration(sizeTrueCompleteTrue)); } @Test - public void printConfiguration_sizeFalseCompleteFalse_valid() { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(false); - prq.setComplete(false); - assertEquals(String.format(outputConfigurationBase, false, false), captureOutputPrintConfiguration(prq)); + public void printConfiguration_sizeFalseCompleteFalse() { + assertEquals(String.format(outputConfigurationBase, false, false), + captureOutputPrintConfiguration(sizeFalseCompleteFalse)); } @Test public void setSizeOnly_and_isSizeOnly() { PrintQueryResults prq = new PrintQueryResults(); prq.setSizeOnly(false); - assertEquals(false, prq.isSizeOnly()); + assertFalse(prq.isSizeOnly()); prq.setSizeOnly(true); - assertEquals(true, prq.isSizeOnly()); + assertTrue(prq.isSizeOnly()); } @Test public void setComplete_and_isComplete() { PrintQueryResults prq = new PrintQueryResults(); prq.setComplete(false); - assertEquals(false, prq.isComplete()); + assertFalse(prq.isComplete()); prq.setComplete(true); - assertEquals(true, prq.isComplete()); + assertTrue(prq.isComplete()); } private String captureOutputPrintConfiguration(PrintQueryResults prq) { diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java index 1810221cf..53d80e267 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java @@ -2,12 +2,15 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.File; +import java.io.IOException; import java.io.PrintStream; -import javax.naming.ConfigurationException; +import org.junit.Rule; /*- * #%L @@ -30,172 +33,147 @@ */ import org.junit.Test; +import org.junit.rules.TemporaryFolder; import org.vlog4j.client.picocli.SaveModel; public class SaveModelTest { private final String outputConfigurationBase = " --save-model: %b\n --output-model-directory: %s\n"; - private final String dir = "directory"; - private final String tempDir = "tempDir"; private final String defaultDir = "model"; + private final SaveModel 
saveTrueDefaultDir = new SaveModel(true, defaultDir); + private final SaveModel saveTrueEmptyDir = new SaveModel(true, ""); + private final SaveModel saveTrueNullDir = new SaveModel(true, null); + private final SaveModel saveFalseDefaultDir = new SaveModel(false, defaultDir); + private final SaveModel saveFalseEmptyDir = new SaveModel(false, ""); + private final SaveModel saveFalseNullDir = new SaveModel(false, null); + + @Rule + public TemporaryFolder tempFolder = new TemporaryFolder(); + @Test - public void validate_saveModelTrueDefaultDir_valid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(defaultDir); - sm.validate(); + public void isConfigurationValid_saveTrueDefaultDir_valid() { + assertTrue(saveTrueDefaultDir.isConfigurationValid()); } @Test - public void validate_saveModelTrueValidDir_valid() throws ConfigurationException { - // default configuration - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(dir); - sm.validate(); + public void isConfigurationValid_saveTrueEmptyDir_nonValid() { + assertFalse(saveTrueEmptyDir.isConfigurationValid()); } - @Test(expected = ConfigurationException.class) - public void validate_saveModelTrueEmptyDir_notValid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(""); - sm.validate(); + @Test + public void isConfigurationValid_saveTrueNullDir_nonValid() { + assertFalse(saveTrueNullDir.isConfigurationValid()); } - @Test(expected = ConfigurationException.class) - public void validate_saveModelTrueNullDir_notValid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(null); - sm.validate(); + @Test + public void isConfigurationValid_saveFalseDefaultDir_valid() { + assertTrue(saveFalseDefaultDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseDefaultDir_valid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(defaultDir); - sm.validate(); + public void isConfigurationValid_saveFalseEmptyDir_valid() { + assertTrue(saveFalseEmptyDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseValidDir_valid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(dir); - sm.validate(); + public void isConfigurationValid_saveFalseNullDir_valid() { + assertTrue(saveFalseNullDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseEmptyDir_valid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(""); - sm.validate(); + public void isDirectoryValid_nonExistingDirectory_valid() throws IOException { + File nonExistingDirectory = tempFolder.newFolder("folderPath"); + nonExistingDirectory.delete(); + SaveModel temp = new SaveModel(true, nonExistingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); } @Test - public void validate_saveModelFalseNullDir_valid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(null); - sm.validate(); + public void isDirectoryValid_existingDirectory_valid() throws IOException { + File existingDirectory = tempFolder.newFolder("folderPath"); + existingDirectory.mkdir(); + SaveModel temp = new SaveModel(true, existingDirectory.getAbsolutePath()); + 
assertTrue(temp.isDirectoryValid()); } @Test - public void printConfiguration_saveModelTrueDefaultDir_valid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(defaultDir); - assertEquals(String.format(outputConfigurationBase, true, defaultDir), captureOutputPrintConfiguration(sm)); + public void isDirectoryValid_existingFile_nonValid() throws IOException { + File existingFile = tempFolder.newFile("filePath"); + existingFile.createNewFile(); + SaveModel temp = new SaveModel(true, existingFile.getAbsolutePath()); + assertFalse(temp.isDirectoryValid()); } @Test - public void printConfiguration_saveModelTrueValidDir_valid() { - // default configuration - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(dir); - assertEquals(String.format(outputConfigurationBase, false, dir), captureOutputPrintConfiguration(sm)); + public void mkdir_saveTrueNonExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.delete(); + SaveModel temp = new SaveModel(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); } @Test - public void printConfiguration_saveModelTrueEmptyDir_notValid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(""); - assertEquals(String.format(outputConfigurationBase, true, ""), captureOutputPrintConfiguration(sm)); + public void mkdir_saveTrueExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.mkdirs(); + SaveModel temp = new SaveModel(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); } @Test - public void printConfiguration_saveModelTrueNullDir_notValid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(null); - assertEquals(String.format(outputConfigurationBase, true, null), captureOutputPrintConfiguration(sm)); + public void mkdir_saveFalse() throws IOException { + File folder = tempFolder.newFile("validNonExistingFolder"); + folder.delete(); + SaveModel temp = new SaveModel(false, folder.getAbsolutePath()); + temp.mkdir(); + assertFalse(folder.exists()); } @Test - public void printConfiguration_saveModelFalseDefaultDir_valid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(defaultDir); - assertEquals(String.format(outputConfigurationBase, false, defaultDir), captureOutputPrintConfiguration(sm)); + public void printConfiguration_saveTrueDefaultDir() { + assertEquals(String.format(outputConfigurationBase, true, defaultDir), + captureOutputPrintConfiguration(saveTrueDefaultDir)); } @Test - public void printConfiguration_saveModelFalseValidDir_valid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(dir); - assertEquals(String.format(outputConfigurationBase, false, dir), captureOutputPrintConfiguration(sm)); + public void printConfiguration_saveTrueEmptyDir() { + assertEquals(String.format(outputConfigurationBase, true, ""), + captureOutputPrintConfiguration(saveTrueEmptyDir)); } @Test - public void printConfiguration_saveModelFalseEmptyDir_valid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(""); - assertEquals(String.format(outputConfigurationBase, false, ""), captureOutputPrintConfiguration(sm)); + public void printConfiguration_saveTrueNullDir() { + 
assertEquals(String.format(outputConfigurationBase, true, null), + captureOutputPrintConfiguration(saveTrueNullDir)); } @Test - public void printConfiguration_saveModelFalseNullDir_valid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(null); - assertEquals(String.format(outputConfigurationBase, false, null), captureOutputPrintConfiguration(sm)); + public void printConfiguration_saveFalseDefaultDir() { + assertEquals(String.format(outputConfigurationBase, false, defaultDir), + captureOutputPrintConfiguration(saveFalseDefaultDir)); } @Test - public void prepare_saveModelTrueValidDir() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(tempDir); - sm.prepare(); - File f = new File(tempDir); - assert (f.exists() && f.isDirectory()); - f.delete(); + public void printConfiguration_saveFalseEmptyDir() { + assertEquals(String.format(outputConfigurationBase, false, ""), + captureOutputPrintConfiguration(saveFalseEmptyDir)); } @Test - public void prepare_saveModelFalseValidDir() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(dir); - sm.prepare(); - File f = new File(tempDir); - assertFalse(f.exists()); + public void printConfiguration_saveFalseNullDir() { + assertEquals(String.format(outputConfigurationBase, false, null), + captureOutputPrintConfiguration(saveFalseNullDir)); } @Test public void setSaveModel_and_isSaveModel() { SaveModel sm = new SaveModel(); sm.setSaveModel(true); - assertEquals(true, sm.isSaveModel()); + assertTrue(sm.isSaveModel()); sm.setSaveModel(false); - assertEquals(false, sm.isSaveModel()); + assertFalse(sm.isSaveModel()); } @Test @@ -203,10 +181,8 @@ public void setOutputModelDirectory_and_getOutputModelDirectory() { SaveModel sm = new SaveModel(); sm.setOutputModelDirectory(""); assertEquals("", sm.getOutputModelDirectory()); - sm.setOutputModelDirectory(dir); - assertEquals(dir, sm.getOutputModelDirectory()); sm.setOutputModelDirectory(null); - assertEquals(null, sm.getOutputModelDirectory()); + assertNull(sm.getOutputModelDirectory()); } private String captureOutputPrintConfiguration(SaveModel sm) { diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java index 3c8c3d5d7..65988e564 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java @@ -2,12 +2,15 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.File; +import java.io.IOException; import java.io.PrintStream; -import javax.naming.ConfigurationException; +import org.junit.Rule; /*- * #%L @@ -30,182 +33,155 @@ */ import org.junit.Test; +import org.junit.rules.TemporaryFolder; public class SaveQueryResultsTest { private final String outputConfigurationBase = " --save-query-results: %b\n --output-query-result-directory: %s\n"; - private final String dir = "directory"; - private final String tempDir = "tempDir"; private final String defaultDir = "query-results"; + private final SaveQueryResults saveTrueDefaultDir = new SaveQueryResults(true, defaultDir); + private final SaveQueryResults saveTrueEmptyDir = new SaveQueryResults(true, ""); + private final SaveQueryResults 
saveTrueNullDir = new SaveQueryResults(true, null); + private final SaveQueryResults saveFalseDefaultDir = new SaveQueryResults(false, defaultDir); + private final SaveQueryResults saveFalseEmptyDir = new SaveQueryResults(false, ""); + private final SaveQueryResults saveFalseNullDir = new SaveQueryResults(false, null); + + @Rule + public TemporaryFolder tempFolder = new TemporaryFolder(); + @Test - public void validate_saveQueryResultsTrueDefaultDir_valid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(defaultDir); - srq.validate(); + public void isConfigurationValid_saveTrueDefaultDir_valid() { + assertTrue(saveTrueDefaultDir.isConfigurationValid()); } @Test - public void validate_saveModelTrueValidDir_valid() throws ConfigurationException { - // default configuration - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(dir); - srq.validate(); + public void isConfigurationValid_saveTrueEmptyDir_notValid() { + assertFalse(saveTrueEmptyDir.isConfigurationValid()); } - @Test(expected = ConfigurationException.class) - public void validate_saveModelTrueEmptyDir_notValid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(""); - srq.validate(); + @Test + public void isConfigurationValid_saveTrueNullDir_notValid() { + assertFalse(saveTrueNullDir.isConfigurationValid()); } - @Test(expected = ConfigurationException.class) - public void validate_saveModelTrueNullDir_notValid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(null); - srq.validate(); + @Test + public void isConfigurationValid_saveFalseDefaultDir_valid() { + assertTrue(saveFalseDefaultDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseDefaultDir_valid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(defaultDir); - srq.validate(); + public void isConfigurationValid_saveFalseEmptyDir_valid() { + assertTrue(saveFalseEmptyDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseValidDir_valid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(dir); - srq.validate(); + public void isConfigurationValid_saveFalseNullDir_valid() { + assertTrue(saveFalseNullDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseEmptyDir_valid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(""); - srq.validate(); + public void isDirectoryValid_nonExistingDirectory_valid() throws IOException { + File nonExistingDirectory = tempFolder.newFolder("folderPath"); + nonExistingDirectory.delete(); + SaveQueryResults temp = new SaveQueryResults(true, nonExistingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); } @Test - public void validate_saveModelFalseNullDir_valid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(null); - srq.validate(); + public void isDirectoryValid_existingDirectory_valid() throws IOException { + File existingDirectory = 
tempFolder.newFolder("folderPath"); + existingDirectory.mkdir(); + SaveQueryResults temp = new SaveQueryResults(true, existingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); } @Test - public void printConfiguration_saveModelTrueDefaultDir_valid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(defaultDir); - assertEquals(String.format(outputConfigurationBase, true, defaultDir), captureOutputPrintConfiguration(srq)); + public void isDirectoryValid_existingFile_nonValid() throws IOException { + File existingFile = tempFolder.newFile("filePath"); + existingFile.createNewFile(); + SaveQueryResults temp = new SaveQueryResults(true, existingFile.getAbsolutePath()); + assertFalse(temp.isDirectoryValid()); } @Test - public void printConfiguration_saveModelTrueValidDir_valid() { - // default configuration - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(dir); - assertEquals(String.format(outputConfigurationBase, false, dir), captureOutputPrintConfiguration(srq)); + public void mkdir_saveTrueNonExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.delete(); + SaveQueryResults temp = new SaveQueryResults(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); } @Test - public void printConfiguration_saveModelTrueEmptyDir_notValid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(""); - assertEquals(String.format(outputConfigurationBase, true, ""), captureOutputPrintConfiguration(srq)); + public void mkdir_saveTrueExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.mkdirs(); + SaveQueryResults temp = new SaveQueryResults(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); } @Test - public void printConfiguration_saveModelTrueNullDir_notValid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(null); - assertEquals(String.format(outputConfigurationBase, true, null), captureOutputPrintConfiguration(srq)); + public void mkdir_saveFalse() throws IOException { + File folder = tempFolder.newFile("validNonExistingFolder"); + folder.delete(); + SaveQueryResults temp = new SaveQueryResults(false, folder.getAbsolutePath()); + temp.mkdir(); + assertFalse(folder.exists()); } @Test - public void printConfiguration_saveModelFalseDefaultDir_valid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(defaultDir); - assertEquals(String.format(outputConfigurationBase, false, defaultDir), captureOutputPrintConfiguration(srq)); + public void printConfiguration_saveTrueDefaultDir() { + assertEquals(String.format(outputConfigurationBase, true, defaultDir), + captureOutputPrintConfiguration(saveTrueDefaultDir)); } @Test - public void printConfiguration_saveModelFalseValidDir_valid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(dir); - assertEquals(String.format(outputConfigurationBase, false, dir), captureOutputPrintConfiguration(srq)); + public void printConfiguration_saveTrueEmptyDir() { + assertEquals(String.format(outputConfigurationBase, true, ""), + 
captureOutputPrintConfiguration(saveTrueEmptyDir)); } @Test - public void printConfiguration_saveModelFalseEmptyDir_valid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(""); - assertEquals(String.format(outputConfigurationBase, false, ""), captureOutputPrintConfiguration(srq)); + public void printConfiguration_saveTrueNullDir() { + assertEquals(String.format(outputConfigurationBase, true, null), + captureOutputPrintConfiguration(saveTrueNullDir)); } @Test - public void printConfiguration_saveModelFalseNullDir_valid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(null); - assertEquals(String.format(outputConfigurationBase, false, null), captureOutputPrintConfiguration(srq)); + public void printConfiguration_saveFalseDefaultDir() { + assertEquals(String.format(outputConfigurationBase, false, defaultDir), + captureOutputPrintConfiguration(saveFalseDefaultDir)); } @Test - public void prepare_saveModelTrueValidDir() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(tempDir); - srq.prepare(); - File f = new File(tempDir); - assert (f.exists() && f.isDirectory()); - f.delete(); + public void printConfiguration_saveFalseEmptyDir() { + assertEquals(String.format(outputConfigurationBase, false, ""), + captureOutputPrintConfiguration(saveFalseEmptyDir)); } @Test - public void prepare_saveModelFalseValidDir() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(dir); - srq.prepare(); - File f = new File(tempDir); - assertFalse(f.exists()); + public void printConfiguration_saveFalseNullDir() { + assertEquals(String.format(outputConfigurationBase, false, null), + captureOutputPrintConfiguration(saveFalseNullDir)); } @Test public void setSaveResults_and_isSaveResults() { SaveQueryResults srq = new SaveQueryResults(); srq.setSaveResults(true); - assertEquals(true, srq.isSaveResults()); + assertTrue(srq.isSaveResults()); srq.setSaveResults(false); - assertEquals(false, srq.isSaveResults()); + assertFalse(srq.isSaveResults()); } @Test - public void setOutputQueryResultDirectory_and_getOutputModelDirectory() { + public void setOutputQueryResultDirectory_and_getOutputQueryResultsDirectory() { SaveQueryResults srq = new SaveQueryResults(); srq.setOutputQueryResultDirectory(""); assertEquals("", srq.getOutputQueryResultDirectory()); - srq.setOutputQueryResultDirectory(dir); - assertEquals(dir, srq.getOutputQueryResultDirectory()); srq.setOutputQueryResultDirectory(null); - assertEquals(null, srq.getOutputQueryResultDirectory()); + assertNull(srq.getOutputQueryResultDirectory()); } private String captureOutputPrintConfiguration(SaveQueryResults srq) { From 5b269b27d882a9f9b82e86de4e2fb46eec837aa7 Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 30 Oct 2019 22:17:48 +0100 Subject: [PATCH 0334/1003] added some tests and modified Serializer --- .../core/model/implementation/Serializer.java | 15 +++++--- .../vlog4j/core/model/FactTest.java | 9 +++++ .../vlog4j/core/model/RuleImplTest.java | 20 +++++++++-- .../vlog4j/core/model/TermImplTest.java | 36 +++++++++++++++++++ 4 files changed, 73 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index c4b6fc118..091b5e9fe 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -38,7 +38,11 @@ * @author Ali Elhalawati * */ -public class Serializer { +public final class Serializer { + + private Serializer() { + + } public static String getRuleString(RuleImpl rule) { return rule.getHead() + " :- " + rule.getBody() + "."; @@ -83,15 +87,16 @@ public static String getDatatypeConstantString(DatatypeConstantImpl datatypecons return datatypeconstant.getName(); } + public static String getNamedNullString(NamedNullImpl namednull) { + return "_" + namednull.getName(); + } + public static String getLanguageConstantString(LanguageStringConstantImpl languagestringconstant) { return languagestringconstant.getName(); } - public static String getNamedNullString(NamedNullImpl namednull) { - return "_" + namednull.toString(); - } public static String getPredicateString(Predicate predicate) { - return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; + return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java index 9301dcff8..e9b29787e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java @@ -54,4 +54,13 @@ public void factsOnlyContainConstants() { new FactImpl(p, Arrays.asList(x)); } + @Test + public void testtoString() { + final Predicate p = Expressions.makePredicate("p", 2); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); + assertEquals("p(c, d)", f1.toString()); + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index be3cee765..7c05f8b61 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -78,7 +78,6 @@ public void testEquals() { final Rule rule8 = Expressions.makePositiveLiteralsRule(headPositiveLiterals, bodyPositiveLiterals); assertEquals(rule1, rule1); - assertEquals(rule1.toString(),"q(?X, !Y) :- p(?X, c), p(?X, ?Z)."); assertEquals(rule2, rule1); assertEquals(rule2.hashCode(), rule1.hashCode()); @@ -96,7 +95,7 @@ public void testEquals() { assertNotEquals(rule5, rule1); assertFalse(rule1.equals(null)); assertFalse(rule1.equals(c)); - + } @Test(expected = IllegalArgumentException.class) @@ -139,4 +138,21 @@ public void noUnsafeVariables() { Expressions.makeRule(literal1, literal2); } + @Test + public void testtoString() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeExistentialVariable("Y"); + final Variable z = Expressions.makeUniversalVariable("Z"); + final Constant c = Expressions.makeAbstractConstant("c"); + final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, z); + final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, y); + final Conjunction bodyLiterals = 
Expressions.makeConjunction(atom1, atom2); + final Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); + final Conjunction bodyPositiveLiterals = Expressions.makePositiveConjunction(atom1, atom2); + final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z).", rule1.toString()); + + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index 0c11dd8d6..1c9e98f1f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -132,6 +132,42 @@ public void namedNullGetterTest() { assertEquals(TermType.NAMED_NULL, n.getType()); } + @Test + public void testabstractConstantImpltoString() { + AbstractConstantImpl c = new AbstractConstantImpl("c"); + assertEquals("c", c.toString()); + } + + @Test + public void testdatatypeConstantImpltoString() { + DatatypeConstantImpl c = new DatatypeConstantImpl("c", "http://example.org/mystring"); + assertEquals("\"c\"^^", c.toString()); + } + + @Test + public void testlanguageStringConstantImpltoString() { + LanguageStringConstantImpl c = new LanguageStringConstantImpl("Test", "en"); + assertEquals("\"Test\"@en", c.toString()); + } + + @Test + public void testuniversalVariabletoString() { + UniversalVariableImpl v = new UniversalVariableImpl("v"); + assertEquals("?v", v.toString()); + } + + @Test + public void testexistentialVariabletoString() { + ExistentialVariableImpl v = new ExistentialVariableImpl("v"); + assertEquals("!v", v.toString()); + } + + @Test + public void testnamedNulltoString() { + NamedNullImpl n = new NamedNullImpl("123"); + assertEquals("_123", n.toString()); + } + @Test(expected = NullPointerException.class) public void constantNameNonNullTest() { new AbstractConstantImpl((String) null); From ec4e0a7fcbc505cfea4c3d60248189d3cf150e75 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 31 Oct 2019 13:18:31 +0100 Subject: [PATCH 0335/1003] delete unused class --- .../picocli/VLog4jClientTestAcyclicity.java | 42 ------------------- 1 file changed, 42 deletions(-) delete mode 100644 vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientTestAcyclicity.java diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientTestAcyclicity.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientTestAcyclicity.java deleted file mode 100644 index 805811165..000000000 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientTestAcyclicity.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.vlog4j.client.picocli; - -/*- - * #%L - * VLog4j Client - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import picocli.CommandLine.Command; - -@Command(name = "testacyclicity", description = "Test if the rule set satisfies any acyclicity notion") -public class VLog4jClientTestAcyclicity implements Runnable { - -// TODO implement the following method -// @Option(names = "--acyclicity-notion", required = false, description = "Acyclicity notion. One of:JA (Joint Acyclicity), RJA (Restricted Joint Acyclicity), RFA (Model-Faithful Acyclicity), RMFA (Restricted Model-Faithful Acyclicity). All by default.") -// String acyclicityNotion; - -// TODO implement the following method -// @Option(names = "--rule-file", description = "Rule file in rls syntax", required = true) -// private String rulePath; - - @Override - public void run() { - System.err.println("Not implemented yet."); - System.err.println("Exiting the program."); - } - -} From 6b8b41d1c30022c47a454f59bead7d77060f0a06 Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 31 Oct 2019 13:58:12 +0100 Subject: [PATCH 0336/1003] fixed fact tostring --- .../vlog4j/core/model/implementation/FactImpl.java | 6 ++++++ .../vlog4j/core/model/implementation/Serializer.java | 4 ---- .../java/org/semanticweb/vlog4j/core/model/FactTest.java | 2 +- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index cb79676f8..b08939577 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -47,5 +47,11 @@ public FactImpl(Predicate predicate, List terms) { public T accept(StatementVisitor statementVisitor) { return statementVisitor.visit(this); } + + @Override + public String toString() { + return Serializer.getLiteralString(this)+"."; + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 091b5e9fe..01496eb18 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -67,10 +67,6 @@ public static String getLiteralString(AbstractLiteralImpl literal) { return stringBuilder.toString(); } - public static String getFactString(FactImpl fact) { - return fact.toString() + "."; - } - public static String getConstantString(AbstractConstantImpl constant) { return constant.getName(); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java index e9b29787e..d2da2f1de 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java @@ -60,7 +60,7 @@ public void testtoString() { final Constant c = Expressions.makeAbstractConstant("c"); final Constant d = Expressions.makeAbstractConstant("d"); final Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); - assertEquals("p(c, d)", f1.toString()); + assertEquals("p(c, d).", f1.toString()); } } From 30c5a83c67fbda6f7c53ec5b835f5e6ef791edf8 Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 31 Oct 2019 14:19:14 +0100 Subject: [PATCH 0337/1003] added tests for conjunction and datasourcedeclaration 
--- .../vlog4j/core/model/ConjunctionImplTest.java | 15 +++++++++++++++ .../core/model/DataSourceDeclarationTest.java | 13 ++++++++++++- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java index cb787b901..4df0c8f68 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java @@ -181,4 +181,19 @@ public void negativeLiteralsNoNullElements() { Expressions.makeConjunction(negativeLiteralList); } + @Test + public void testconjunctiontoString() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y, x); + final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); + final List positiveLiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, + positiveLiteral3); + final Conjunction conjunction1 = new ConjunctionImpl<>(positiveLiteralList); + assertEquals("p(?X, c), p(?Y, ?X), q(?X, d)", conjunction1.toString()); + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index c718ccd1a..4e35fcf32 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -41,7 +41,6 @@ public void equalityTest() throws MalformedURLException { "?var wdt:P31 wd:Q5 ."); Predicate predicate1 = Expressions.makePredicate("p", 3); DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); - DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", "?var wdt:P31 wd:Q5 ."); Predicate predicate2 = Expressions.makePredicate("p", 3); @@ -63,4 +62,16 @@ public void equalityTest() throws MalformedURLException { assertFalse(dataSourceDeclaration1.equals(null)); // written like this for recording coverage properly } + @Test + public void testdataSourceDecalarationtoString() throws MalformedURLException { + DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + "?var wdt:P31 wd:Q5 ."); + Predicate predicate1 = Expressions.makePredicate("p", 3); + DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); + DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + "?var wdt:P31 wd:Q5 ."); + Predicate predicate2 = Expressions.makePredicate("p", 3); + DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); + assertEquals(dataSourceDeclaration1.toString(), dataSourceDeclaration2.toString()); + } } From f6f615420ba60158795ee9cdeb3dc6415748b250 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 4 Nov 2019 11:49:58 +0100 Subject: [PATCH 0338/1003] add methods to to query the number of results in a query 
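A minimal sketch of how the new counting methods could be used. The toy facts, rule, predicate names, and the class name QueryAnswerSizeSketch are invented for illustration; the construction of the reasoner via new VLogReasoner(kb) and the RuleParser entry points are assumed from the surrounding modules rather than taken from this patch:

    import java.io.IOException;

    import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
    import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
    import org.semanticweb.vlog4j.core.reasoner.Reasoner;
    import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;
    import org.semanticweb.vlog4j.parser.ParsingException;
    import org.semanticweb.vlog4j.parser.RuleParser;

    public class QueryAnswerSizeSketch {
        public static void main(final String[] args) throws IOException, ParsingException {
            // Toy knowledge base: two facts and one rule (illustrative data only).
            final KnowledgeBase kb = new KnowledgeBase();
            RuleParser.parseInto(kb, "p(a, b) . p(b, c) . q(?X, ?Y) :- p(?X, ?Y) .");

            try (final Reasoner reasoner = new VLogReasoner(kb)) {
                reasoner.reason();
                final PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X, ?Y)");
                // Count query answers without iterating over them on the client side.
                System.out.println(reasoner.queryAnswerSize(query)); // includes nulls
                System.out.println(reasoner.queryAnswerSize(query, false)); // named terms only
            }
        }
    }

Counting at this level replaces client-side counting loops such as the ExamplesUtils.getQueryAnswerCount helper removed in a later commit of this series.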
---
 .../vlog4j/core/reasoner/Reasoner.java        | 102 ++++++++++++++++++
 .../reasoner/implementation/VLogReasoner.java |  50 +++++++--
 2 files changed, 143 insertions(+), 9 deletions(-)

diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java
index f391bbe80..6c274b820 100644
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java
+++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java
@@ -330,6 +330,108 @@ public static Reasoner getInstance() {
 	 */
 	QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls);
+
+	// TODO add examples to query javadoc
+	/**
+	 * Evaluates an atomic query ({@code query}) on the implicit facts loaded into
+	 * the reasoner and the explicit facts materialised by the reasoner, and
+	 * returns the number of query answers, including nulls. <br>
+	 * An answer to the query is given by the terms of a fact that matches the
+	 * {@code query}: the fact predicate is the same as the {@code query}
+	 * predicate, the {@link TermType#CONSTANT} terms of the {@code query} appear
+	 * in the answer fact at the same term position, and the
+	 * {@link TermType#VARIABLE} terms of the {@code query} are matched by terms
+	 * in the fact, either named ({@link TermType#CONSTANT}) or anonymous
+	 * ({@link TermType#NAMED_NULL}). The same variable name identifies the same
+	 * term in the answer fact. <br>
+	 * A query answer is represented by a {@link QueryResult}. A query can have
+	 * multiple, distinct query answers. This method returns the number of these
+	 * answers. <br>
+	 *
+	 * Depending on the state of the reasoning (materialisation) and its
+	 * {@link KnowledgeBase}, the answers can have a different {@link Correctness}
+	 * ({@link QueryResultIterator#getCorrectness()}):
+	 * <ul>
+	 * <li>If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over the
+	 * current knowledge base has completed, and the query answers are guaranteed
+	 * to be correct.</li>
+	 * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+	 * to be sound, but may be incomplete. This can happen
+	 * <ul>
+	 * <li>when materialisation has not completed ({@link Reasoner#reason()}
+	 * returns {@code false}),</li>
+	 * <li>or when the knowledge base was modified after reasoning, and the
+	 * materialisation does not reflect the current knowledge base.
+	 * Re-materialisation ({@link Reasoner#reason()}) is required in order to
+	 * obtain complete query answers with respect to the current knowledge
+	 * base.</li>
+	 * </ul>
+	 * </li>
+	 * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and
+	 * some results may be unsound. This can happen when the knowledge base was
+	 * modified and the reasoner materialisation is no longer consistent with the
+	 * current knowledge base. Re-materialisation ({@link Reasoner#reason()}) is
+	 * required, in order to obtain correct query answers.</li>
+	 * </ul>
+	 *
+	 * @param query a {@link PositiveLiteral} representing the query to be answered.
+	 * @return number of facts in the extension of the query.
+	 */
+	long queryAnswerSize(PositiveLiteral query);
+
+	// TODO add examples to query javadoc
+	/**
+	 * Evaluates an atomic query ({@code query}) on the implicit facts loaded into
+	 * the reasoner and the explicit facts materialised by the reasoner, and
+	 * returns the number of query answers. <br>
+	 * An answer to the query is given by the terms of a fact that matches the
+	 * {@code query}: the fact predicate is the same as the {@code query}
+	 * predicate, the {@link TermType#CONSTANT} terms of the {@code query} appear
+	 * in the answer fact at the same term position, and the
+	 * {@link TermType#VARIABLE} terms of the {@code query} are matched by terms
+	 * in the fact, either named ({@link TermType#CONSTANT}) or anonymous
+	 * ({@link TermType#NAMED_NULL}). The same variable name identifies the same
+	 * term in the answer fact. <br>
+	 * A query answer is represented by a {@link QueryResult}. A query can have
+	 * multiple, distinct query answers. This method returns the number of these
+	 * answers. <br>
+	 *
+	 * Depending on the state of the reasoning (materialisation) and its
+	 * {@link KnowledgeBase}, the answers can have a different {@link Correctness}
+	 * ({@link QueryResultIterator#getCorrectness()}):
+	 * <ul>
+	 * <li>If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over the
+	 * current knowledge base has completed, and the query answers are guaranteed
+	 * to be correct.</li>
+	 * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+	 * to be sound, but may be incomplete. This can happen
+	 * <ul>
+	 * <li>when materialisation has not completed ({@link Reasoner#reason()}
+	 * returns {@code false}),</li>
+	 * <li>or when the knowledge base was modified after reasoning, and the
+	 * materialisation does not reflect the current knowledge base.
+	 * Re-materialisation ({@link Reasoner#reason()}) is required in order to
+	 * obtain complete query answers with respect to the current knowledge
+	 * base.</li>
+	 * </ul>
+	 * </li>
+	 * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and
+	 * some results may be unsound. This can happen when the knowledge base was
+	 * modified and the reasoner materialisation is no longer consistent with the
+	 * current knowledge base. Re-materialisation ({@link Reasoner#reason()}) is
+	 * required, in order to obtain correct query answers.</li>
+	 * </ul>
    + * + * + * @param query a {@link PositiveLiteral} representing the query to be + * answered. + * @param includeNulls if {@code true}, {@link QueryResult}s containing terms of + * type {@link TermType#NAMED_NULL} (representing anonymous + * individuals introduced to satisfy rule existentially + * quantified variables) will be included. Otherwise, the + * answers will only contain the {@link QueryResult}s with + * terms of type {@link TermType#CONSTANT} (representing + * named individuals). + * @return number of facts in the extension of the query. + */ + long queryAnswerSize(PositiveLiteral query, boolean includeNulls); + // TODO add examples to query javadoc /** * Evaluates an atomic query ({@code query}) on the implicit facts loaded into diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index d355e17f2..33f1f428f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -569,14 +569,14 @@ private void runChase() { } @Override - public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) { + public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); } Validate.notNull(query, "Query atom must not be null!"); - final boolean filterBlanks = !includeBlanks; + final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); TermQueryResultIterator stringQueryResultIterator; @@ -594,6 +594,33 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); } + @Override + public long queryAnswerSize(PositiveLiteral query) { + return queryAnswerSize(query, true); + } + + @Override + public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { + validateNotClosed(); + validateKBLoaded("Querying is not alowed before reasoner is loaded!"); + Validate.notNull(query, "Query atom must not be null!"); + + final boolean filterBlanks = !includeNulls; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + + int result = -1; + try { + result = this.vLog.querySize(vLogAtom, true, filterBlanks); + } catch (NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (NonExistingPredicateException e) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. 
Answer must be empty!"); + return 0; + } + return result; + } + @Override public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { @@ -740,19 +767,18 @@ public void onStatementsAdded(List statementsAdded) { // TODO more elaborate materialisation state handling updateReasonerToKnowledgeBaseChanged(); - - //updateCorrectnessOnStatementsAdded(statementsAdded); + + // updateCorrectnessOnStatementsAdded(statementsAdded); updateCorrectness(); } - @Override public void onStatementAdded(Statement statementAdded) { // TODO more elaborate materialisation state handling updateReasonerToKnowledgeBaseChanged(); - - //updateCorrectnessOnStatementAdded(statementAdded); + + // updateCorrectnessOnStatementAdded(statementAdded); updateCorrectness(); } @@ -766,9 +792,9 @@ private void updateReasonerToKnowledgeBaseChanged() { private void updateCorrectness() { if (this.reasonerState == ReasonerState.KB_CHANGED) { - + final boolean noRules = this.knowledgeBase.getRules().isEmpty(); - this.correctness = noRules? Correctness.SOUND_BUT_INCOMPLETE : Correctness.INCORRECT; + this.correctness = noRules ? Correctness.SOUND_BUT_INCOMPLETE : Correctness.INCORRECT; } } @@ -784,4 +810,10 @@ void validateNotClosed() throws ReasonerStateException { } } + void validateKBLoaded(String errorMessage) { + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, errorMessage); + } + } + } From 4e058806ab50de6d32b173211834800ca261e33a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 4 Nov 2019 11:51:24 +0100 Subject: [PATCH 0339/1003] remove helper methods: getQueryAnswerCount, iteratorSize --- .../vlog4j/examples/ExamplesUtils.java | 45 ------------------- 1 file changed, 45 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index a21ef2999..906d3c368 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -112,51 +112,6 @@ public static void printOutQueryAnswers(final String queryString, final Reasoner } } - /** - * Returns the number of answers returned by {@code reasoner} to the query - * ({@code queryAtom}). - * - * @param queryAtom query to be answered - * @param reasoner reasoner to query on - */ - public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Reasoner reasoner) { - try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { - return iteratorSize(answers); - } - } - - /** - * Returns the number of answers returned by {@code reasoner} to the query - * ({@code queryAtom}). - * - * @param queryAtom query to be answered - * @param reasoner reasoner to query on - */ - public static int getQueryAnswerCount(final String queryString, final Reasoner reasoner) { - try { - final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); - return getQueryAnswerCount(query, reasoner); - } catch (final ParsingException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - /** - * Returns the size of an iterator. - * - * @FIXME This is an inefficient way of counting results. 
It should be done at a - * lower level instead - * @param Iterator to iterate over - * @return number of elements in iterator - */ - private static int iteratorSize(final Iterator iterator) { - int size = 0; - for (; iterator.hasNext(); ++size) { - iterator.next(); - } - return size; - } - /** * Creates an Atom with @numberOfVariables distinct variables * From 59a50a9c20c19571f3db5f8ae462a4e020a1704f Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 4 Nov 2019 11:52:56 +0100 Subject: [PATCH 0340/1003] add test for native querySize --- .../implementation/QuerySizeTest.java | 262 ++++++++++++++++++ 1 file changed, 262 insertions(+) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java new file mode 100644 index 000000000..0c2699f32 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java @@ -0,0 +1,262 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; + +public class QuerySizeTest { + + private static final Predicate predP = Expressions.makePredicate("P", 1); + private static final Predicate predQ = Expressions.makePredicate("Q", 1); + private static final Predicate predR = Expressions.makePredicate("R", 2); + private static final Variable x = Expressions.makeUniversalVariable("x"); + private static final Variable y = Expressions.makeExistentialVariable("y"); + private static final Constant c = Expressions.makeAbstractConstant("c"); + private static final Constant d = Expressions.makeAbstractConstant("d"); + private static final Constant e = Expressions.makeAbstractConstant("e"); + private static final Constant f = Expressions.makeAbstractConstant("f"); + + private static final PositiveLiteral Px = Expressions.makePositiveLiteral(predP, x); + private static final PositiveLiteral Qx = Expressions.makePositiveLiteral(predQ, x); + private static final PositiveLiteral Qy = Expressions.makePositiveLiteral(predQ, y); + private static final PositiveLiteral Rxy = Expressions.makePositiveLiteral(predR, x, y); + private static final Conjunction conRxyQy = Expressions.makePositiveConjunction(Rxy, Qy); + private static final Conjunction conPx = Expressions.makeConjunction(Px); + + private static final Rule ruleQxPx = Expressions.makeRule(Qx, Px); + private static final Rule RxyQyPx = Expressions.makeRule(conRxyQy, conPx); + + private static final Fact factPc = Expressions.makeFact(predP, c); + private static final Fact factPd = Expressions.makeFact(predP, d); + + private static final Fact factQe = Expressions.makeFact(predQ, e); + private static final Fact factQf = Expressions.makeFact(predQ, f); + + private static final PositiveLiteral Rdy = Expressions.makePositiveLiteral(predR, d, y); + private static final PositiveLiteral Rey = Expressions.makePositiveLiteral(predR, e, y); + private static final PositiveLiteral Rxd = Expressions.makePositiveLiteral(predR, x, d); + private static final PositiveLiteral Rxe = Expressions.makePositiveLiteral(predR, x, e); + + @Test + public void noFactsnoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Qx, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + } + } + + @Test + public void noFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(ruleQxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Qx, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + } + } + + @Test + public void noFactsExistentialRule() throws IOException { 
+ final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Qx, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + } + } + + @Test + public void pFactsNoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Qx, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + } + } + + @Test + public void pFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, ruleQxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Qx, true)); + assertEquals(2, reasoner.queryAnswerSize(Qx, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + } + } + + @Test + public void pFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px)); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Qx)); + assertEquals(2, reasoner.queryAnswerSize(Qx, true)); + assertEquals(0, reasoner.queryAnswerSize(Qx, false)); + assertEquals(2, reasoner.queryAnswerSize(Rxy)); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + } + } + + @Test + public void qFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.queryAnswerSize(Px)); + assertEquals(0, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Qx)); + assertEquals(2, reasoner.queryAnswerSize(Qx, true)); + assertEquals(2, reasoner.queryAnswerSize(Qx, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxy)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + } + } + + @Test + public void qFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.queryAnswerSize(Px)); + assertEquals(0, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Qx)); + assertEquals(2, reasoner.queryAnswerSize(Qx, true)); + assertEquals(2, reasoner.queryAnswerSize(Qx, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxy)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + } + } + + @Test + public void pFactsQFactsUniversalRule() throws 
IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, ruleQxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px)); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(4, reasoner.queryAnswerSize(Qx)); + assertEquals(4, reasoner.queryAnswerSize(Qx, true)); + assertEquals(4, reasoner.queryAnswerSize(Qx, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxy)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + } + } + + @Test + public void pFactsQFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px)); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(4, reasoner.queryAnswerSize(Qx)); + assertEquals(4, reasoner.queryAnswerSize(Qx, true)); + assertEquals(2, reasoner.queryAnswerSize(Qx, false)); + assertEquals(2, reasoner.queryAnswerSize(Rxy)); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rey, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); + assertEquals(0, reasoner.queryAnswerSize(Rey, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + } + } + + @Test + public void pFactsQFactsExistentialAndUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, ruleQxPx, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px)); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(6, reasoner.queryAnswerSize(Qx)); + assertEquals(6, reasoner.queryAnswerSize(Qx, true)); + assertEquals(4, reasoner.queryAnswerSize(Qx, false)); + assertEquals(2, reasoner.queryAnswerSize(Rxy)); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rey, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); + assertEquals(0, reasoner.queryAnswerSize(Rey, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + } + } + +} From dde14c285ca50a1129c33c2a5d8a8945211bd701 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 4 Nov 2019 11:53:23 +0100 Subject: [PATCH 0341/1003] update code to use native queriSize --- .../client/picocli/VLog4jClientMaterialize.java | 3 +-- .../vlog4j/examples/CompareWikidataDBpedia.java | 6 +++--- .../vlog4j/examples/CountingTriangles.java | 17 ++++++++++------- .../vlog4j/examples/DoidExample.java | 5 
+++-- .../examples/InMemoryGraphAnalysisExample.java | 10 ++++++---- .../SkolemVsRestrictedChaseTermination.java | 2 +- .../vlog4j/examples/graal/DoidExampleGraal.java | 5 +++-- 7 files changed, 27 insertions(+), 21 deletions(-) diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java index 7758a0065..6f4755f5c 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -227,8 +227,7 @@ private void doSaveQueryResults(Reasoner reasoner, PositiveLiteral query) { } private void doPrintResults(Reasoner reasoner, PositiveLiteral query) { - System.out.println( - "Number of query answers in " + query + ": " + ExamplesUtils.getQueryAnswerCount(query, reasoner)); + System.out.println("Number of query answers in " + query + ": " + reasoner.queryAnswerSize(query)); } private String queryOputputPath(PositiveLiteral query) { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 7e1031f42..4bd0392c4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -88,9 +88,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final int resultCount = ExamplesUtils.getQueryAnswerCount("result(?X)", reasoner); - final int wdCount = ExamplesUtils.getQueryAnswerCount("inWd(?X)", reasoner); - final int dbpCount = ExamplesUtils.getQueryAnswerCount("inDbp(?X)", reasoner); + final double resultCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("result(?X)")); + final double wdCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inWd(?X)")); + final double dbpCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inDbp(?X)")); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 050671ebb..63e1cb98e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -39,7 +39,7 @@ */ public class CountingTriangles { - public static void main(final String[] args) throws IOException { + public static void main(final String[] args) throws IOException, ParsingException { ExamplesUtils.configureLogging(); KnowledgeBase kb; @@ -61,15 +61,18 @@ public static void main(final String[] args) throws IOException { /* Initialise reasoner and compute inferences */ reasoner.reason(); - System.out.print("Found " + ExamplesUtils.getQueryAnswerCount("country(?X)", reasoner) - + " countries in Wikidata"); + final double countries = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("country(?X)")); + final double shareBorder = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")); + final double triangles = 
reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")); + + System.out.print("Found " + countries + " countries in Wikidata"); // Due to symmetry, each joint border is found twice, hence we divide by 2: - System.out.println(", with " + (ExamplesUtils.getQueryAnswerCount("shareBorder(?X,?Y)", reasoner) / 2) - + " pairs of them sharing a border."); + System.out.println(", with " + (shareBorder / 2) + " pairs of them sharing a border."); // Due to symmetry, each triangle is found six times, hence we divide by 6: System.out.println("The number of triangles of countries that mutually border each other was " - + (ExamplesUtils.getQueryAnswerCount("triangle(?X,?Y,?Z)", reasoner) / 6) + "."); + + (triangles / 6) + "."); } } -} + +} \ No newline at end of file diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index e237dc70c..96f573d87 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -44,7 +44,7 @@ */ public class DoidExample { - public static void main(final String[] args) throws IOException { + public static void main(final String[] args) throws IOException, ParsingException { ExamplesUtils.configureLogging(); /* Configure rules */ @@ -72,7 +72,8 @@ public static void main(final String[] args) throws IOException { final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { - System.out.println(" " + queryString + ": " + ExamplesUtils.getQueryAnswerCount(queryString, reasoner)); + double querySize = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral(queryString)); + System.out.println(" " + queryString + ": " + querySize); } } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index 9e68b8406..87bc9927e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -87,10 +87,12 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - System.out.println("Number of vertices not reachable from vertex 1 by a bi-directional path: " - + ExamplesUtils.getQueryAnswerCount("unreachable(?X)", reasoner)); - System.out.println("Number of bi-directional triangles: " - + (ExamplesUtils.getQueryAnswerCount("triangle(?X,?Y,?Z)", reasoner) / 6)); + final double unreachable = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("unreachable(?X)")); + final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")); + + System.out + .println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + unreachable); + System.out.println("Number of bi-directional triangles: " + (triangles / 6)); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 1e7e5015c..138c58d4b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -102,7 +102,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio * restrictions. */ System.out.println("Before the timeout, the Skolem chase had produced " - + ExamplesUtils.getQueryAnswerCount(queryHasPart, reasoner) + " results for hasPart(?X, ?Y)."); + + reasoner.queryAnswerSize(queryHasPart) + " results for hasPart(?X, ?Y)."); /* * 6. We reset the reasoner to discard all inferences, and apply the Restricted diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 74a418a6f..e53b2845a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -129,11 +129,12 @@ public static void main(final String[] args) throws IOException { reasoner.reason(); System.out.println("... reasoning completed."); + final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); System.out.println("Humans in Wikidata who died in 2018 due to cancer: " - + ExamplesUtils.getQueryAnswerCount("humansWhoDiedOfCancer(?X)", reasoner)); + + reasoner.queryAnswerSize(humansWhoDiedOfCancer)); System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " - + ExamplesUtils.getQueryAnswerCount(humansWhoDiedOfNoncancer, reasoner)); + + reasoner.queryAnswerSize(humansWhoDiedOfNoncancer)); System.out.println("Done."); } From 23dd7d92ec8bd42fe77c502c521f13f5d174d60b Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 4 Nov 2019 16:40:39 +0100 Subject: [PATCH 0342/1003] auto-format code --- .../vlog4j/core/reasoner/KnowledgeBase.java | 21 +++---- .../core/reasoner/KnowledgeBaseListener.java | 19 +++--- .../core/reasoner/KnowledgeBaseTest.java | 6 +- .../implementation/AddDataSourceTest.java | 58 ++++++++++--------- .../QueryAnsweringCorrectnessTest.java | 24 ++++---- .../VLogReasonerCombinedInputs.java | 3 +- .../implementation/VLogReasonerCsvInput.java | 6 +- .../implementation/VLogReasonerRdfInput.java | 9 ++- .../VLogReasonerSparqlInput.java | 26 +++++---- .../implementation/VLogReasonerStateTest.java | 33 +++++------ .../VLogToModelConverterTest.java | 10 ++-- 11 files changed, 108 insertions(+), 107 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index c5de17132..8e366147d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -198,8 +198,7 @@ public void deleteListener(KnowledgeBaseListener listener) { /** * Adds a single statement to the knowledge base. * - * @param statement - * the statement to be added + * @param statement the statement to be added * @return true, if the knowledge base has changed. 
*/ public void addStatement(Statement statement) { @@ -226,8 +225,7 @@ boolean doAddStatement(Statement statement) { /** * Adds a collection of statements to the knowledge base. * - * @param statements - * the statements to be added + * @param statements the statements to be added */ public void addStatements(Collection statements) { final List addedStatements = new ArrayList<>(); @@ -244,8 +242,7 @@ public void addStatements(Collection statements) { /** * Adds a list of statements to the knowledge base. * - * @param statements - * the statements to be added + * @param statements the statements to be added */ public void addStatements(Statement... statements) { final List addedStatements = new ArrayList<>(); @@ -263,8 +260,7 @@ public void addStatements(Statement... statements) { * Removes a single statement from the knowledge base. * * @return true, if the knowledge base has changed. - * @param statement - * the statement to remove + * @param statement the statement to remove */ public void removeStatement(Statement statement) { if (doRemoveStatement(statement)) { @@ -275,8 +271,7 @@ public void removeStatement(Statement statement) { /** * Removes a single statement from the knowledge base. * - * @param statement - * the statement to remove + * @param statement the statement to remove * @return true, if the knowledge base has changed. */ boolean doRemoveStatement(Statement statement) { @@ -292,8 +287,7 @@ boolean doRemoveStatement(Statement statement) { /** * Removes a collection of statements to the knowledge base. * - * @param statements - * the statements to remove + * @param statements the statements to remove */ public void removeStatements(Collection statements) { final List removedStatements = new ArrayList<>(); @@ -310,8 +304,7 @@ public void removeStatements(Collection statements) { /** * Removes a list of statements from the knowledge base. * - * @param statements - * the statements to remove + * @param statements the statements to remove */ public void removeStatements(Statement... statements) { final List removedStatements = new ArrayList<>(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java index f4dbc47a0..ddbd4fa7a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java @@ -48,23 +48,22 @@ public interface KnowledgeBaseListener { * knowledge base. */ void onStatementsAdded(List statementsAdded); - - + /** - * Event triggered whenever a new statement is removed from the associated knowledge - * base. + * Event triggered whenever a new statement is removed from the associated + * knowledge base. * * @param statementRemoved statement removed from the knowledge base. */ void onStatementRemoved(Statement statementRemoved); - + /** - * Event triggered whenever new statements are removed from the associated knowledge - * base. + * Event triggered whenever new statements are removed from the associated + * knowledge base. * - * @param statementsRemoved a list of new statements that have been removed from the - * knowledge base. + * @param statementsRemoved a list of new statements that have been removed from + * the knowledge base. 
*/ void onStatementsRemoved(List statementsRemoved); - + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java index 7a0d5052b..61a274f23 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java @@ -71,7 +71,7 @@ public void testDoRemoveStatementInexistent() { assertFalse(removed); assertEquals(Arrays.asList(this.fact1, this.fact2, this.fact3), this.kb.getFacts()); assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(fact.getPredicate())); - + assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact1.getPredicate())); assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact2.getPredicate())); assertEquals(Sets.newSet(this.fact3), this.kb.getFactsByPredicate().get(this.fact3.getPredicate())); @@ -79,14 +79,14 @@ public void testDoRemoveStatementInexistent() { @Test public void testDoRemoveStatementInexistentPredicate() { - + final Fact fact = Expressions.makeFact("R", Expressions.makeAbstractConstant("e")); final boolean removed = this.kb.doRemoveStatement(fact); assertFalse(removed); assertEquals(Arrays.asList(this.fact1, this.fact2, this.fact3), this.kb.getFacts()); assertEquals(null, this.kb.getFactsByPredicate().get(fact.getPredicate())); - + assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact1.getPredicate())); assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact2.getPredicate())); assertEquals(Sets.newSet(this.fact3), this.kb.getFactsByPredicate().get(this.fact3.getPredicate())); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index a1534d15d..db5c356ae 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -49,11 +49,13 @@ public class AddDataSourceTest { private static final String CSV_FILE_c_d_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"; - private final Set> csvFile_c1_c2_Content = new HashSet<>(Arrays - .asList(Arrays.asList(Expressions.makeAbstractConstant("c1")), Arrays.asList(Expressions.makeAbstractConstant("c2")))); + private final Set> csvFile_c1_c2_Content = new HashSet<>( + Arrays.asList(Arrays.asList(Expressions.makeAbstractConstant("c1")), + Arrays.asList(Expressions.makeAbstractConstant("c2")))); private final Set> csvFile_c_d_Content = new HashSet<>( - Arrays.asList(Arrays.asList(Expressions.makeAbstractConstant("c")), Arrays.asList(Expressions.makeAbstractConstant("d"))));; + Arrays.asList(Arrays.asList(Expressions.makeAbstractConstant("c")), + Arrays.asList(Expressions.makeAbstractConstant("d"))));; @Test public void testAddDataSourceExistentDataForDifferentPredicates() throws IOException { @@ -99,13 +101,13 @@ public void testAddDataSourceBeforeLoading() throws IOException { kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); reasoner.load(); - try (final QueryResultIterator 
queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } @@ -122,22 +124,22 @@ public void testAddDataSourceAfterLoading() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - + kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); - + reasoner.load(); kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } - + // there is no fact for predicate Q loaded in the reasoner - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { assertFalse(queryResult.hasNext()); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } @@ -153,21 +155,21 @@ public void testAddDataSourceAfterReasoning() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - + kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); - + reasoner.reason(); kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } // there is no fact for predicate Q loaded in the reasoner - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), 
true)) { assertFalse(queryResult.hasNext()); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } @@ -188,8 +190,8 @@ public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws IOExcept try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { + try (QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { System.out.println(QueryResultsUtils.collectQueryResults(queryResult)); } } @@ -210,8 +212,8 @@ public void testAddDataSourceNoFactsForPredicate() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { + try (QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { QueryResultsUtils.collectQueryResults(queryResult); } } @@ -230,8 +232,8 @@ public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOExce try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - try (QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { + try (QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); expectedAnswers.addAll(csvFile_c_d_Content); @@ -255,8 +257,8 @@ public void testAddDataSourceAndFactsForPredicateAfterReasoning() throws IOExcep try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - try (QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { + try (QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); expectedAnswers.add(Arrays.asList(Expressions.makeAbstractConstant("a"))); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java index a070f9416..3d8eea89b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java @@ -490,7 +490,7 @@ public void testStatementsArrayRemovalBeforeLoad() { } } } - + @Test public void testStatementRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -503,7 +503,7 @@ public void testStatementRemovalAfterLoad() throws IOException { } } } - + @Test public void testStatementsListRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -516,7 +516,7 @@ public void testStatementsListRemovalAfterLoad() throws IOException { } } } - + @Test public void 
testStatementsArrayRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -555,7 +555,7 @@ public void testStatementsListRemovalBeforeReason() throws IOException { } } } - + @Test public void testStatementsArrayRemovalBeforeReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -594,7 +594,7 @@ public void testStatementsListRemovalAfterReason() throws IOException { } } } - + @Test public void testStatementsArrayRemovalAfterReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -621,7 +621,7 @@ public void testStatementNotRemovedAfterReason() throws IOException { } } } - + @Test public void testStatementsListNotRemovedAfterReason() throws IOException { final Fact newFact = Expressions.makeFact("newPred", c); @@ -635,7 +635,7 @@ public void testStatementsListNotRemovedAfterReason() throws IOException { } } } - + @Test public void testStatementsArrayNotRemovedAfterReason() throws IOException { final Fact newFact = Expressions.makeFact("newPred", c); @@ -649,7 +649,7 @@ public void testStatementsArrayNotRemovedAfterReason() throws IOException { } } } - + @Test public void testRemoveAndAddStatements() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -663,11 +663,11 @@ public void testRemoveAndAddStatements() throws IOException { } } } - + @Test public void testRemoveAndAddSameStatementOnlyFacts() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc,factPd); + kb.addStatements(factPc, factPd); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); kb.removeStatements(factPc); @@ -677,11 +677,11 @@ public void testRemoveAndAddSameStatementOnlyFacts() throws IOException { } } } - + @Test public void testRemoveAndAddStatementsOnlyFacts() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc,factPd); + kb.addStatements(factPc, factPd); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); kb.removeStatements(factPc, factPd); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java index 528dfc7bf..084c1b321 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -56,7 +56,8 @@ public class VLogReasonerCombinedInputs { final Fact factQc2 = Expressions.makeFact(q, Arrays.asList(Expressions.makeAbstractConstant("c2"))); final Fact factQd = Expressions.makeFact(q, Arrays.asList(Expressions.makeAbstractConstant("d"))); final Fact factPd = Expressions.makeFact("p", Arrays.asList(Expressions.makeAbstractConstant("d"))); - final PositiveLiteral queryQx = Expressions.makePositiveLiteral(q, Arrays.asList(Expressions.makeUniversalVariable("x"))); + final PositiveLiteral queryQx = Expressions.makePositiveLiteral(q, + Arrays.asList(Expressions.makeUniversalVariable("x"))); final Set> resultsCC1C2D = new HashSet<>( Arrays.asList(Collections.singletonList(Expressions.makeAbstractConstant("c")), diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java index 
f8a55c8b3..2b7d85c58 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -53,12 +53,12 @@ public class VLogReasonerCsvInput { private final Constant c2 = Expressions.makeAbstractConstant("c2"); @SuppressWarnings("unchecked") - private final Set> expectedUnaryQueryResult = Sets.newSet(Arrays.asList(this.c1), Arrays.asList(this.c2)); + private final Set> expectedUnaryQueryResult = Sets.newSet(Arrays.asList(this.c1), + Arrays.asList(this.c2)); @Test public void testLoadEmptyCsvFile() throws IOException { - final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(unaryPredicate1, - this.x); + final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(unaryPredicate1, this.x); FileDataSourceTestUtils.testLoadEmptyFile(unaryPredicate1, queryAtom, new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv"))); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java index b00fd9d28..d278116a3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -44,13 +44,16 @@ public class VLogReasonerRdfInput { private static final Predicate ternaryPredicate = Expressions.makePredicate("triple", 3); private static final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(ternaryPredicate, - Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), Expressions.makeUniversalVariable("o")); + Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), + Expressions.makeUniversalVariable("o")); @SuppressWarnings("unchecked") private static final Set> expectedTernaryQueryResult = Sets.newSet( - Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), Expressions.makeAbstractConstant("http://example.org/p"), + Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/p"), Expressions.makeAbstractConstant("http://example.org/c2")), - Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), Expressions.makeAbstractConstant("http://example.org/q"), + Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/q"), Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); @Ignore diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java index ad16cb1ea..521812028 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -65,8 +65,9 @@ public void testSimpleSparqlQuery() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( - 
fatherOfPredicate, Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { assertTrue(answerQuery.hasNext()); final QueryResult firstAnswer = answerQuery.next(); @@ -90,8 +91,9 @@ public void testSimpleSparqlQueryHttps() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( - fatherOfPredicate, Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { assertTrue(answerQuery.hasNext()); final QueryResult firstAnswer = answerQuery.next(); @@ -124,8 +126,9 @@ public void testSimpleSparqlQuery2() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( - fatherOfPredicate, Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { assertTrue(answerQuery.hasNext()); } @@ -147,8 +150,8 @@ public void testConjunctiveQueryNewLineCharacterInQueryBody() throws IOException try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, Expressions.makeUniversalVariable("x"), - Expressions.makeUniversalVariable("y")), false); + reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false); } } @@ -167,8 +170,11 @@ public void testConjunctiveQuery() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( - haveChildrenTogether, Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery( + Expressions.makePositiveLiteral(haveChildrenTogether, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), + false)) { assertTrue(answerQuery.hasNext()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java index 4cbea07ad..6fb85e3c8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java @@ -79,7 +79,7 @@ public void testFailExportQueryAnswersBeforeLoad() throws IOException { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); } } - + @Test(expected = ReasonerStateException.class) public void 
testFailAnswerQueryAfterReset() throws IOException { try (final Reasoner reasoner = Reasoner.getInstance()) { @@ -318,7 +318,7 @@ public void testCloseRepeatedly() throws IOException { reasoner.close(); } } - + @Test public void testStatementRemovalBeforeLoad() { final KnowledgeBase kb = new KnowledgeBase(); @@ -328,7 +328,7 @@ public void testStatementRemovalBeforeLoad() { assertEquals(ReasonerState.KB_NOT_LOADED, reasoner.getReasonerState()); } } - + @Test public void testStatementsListRemovalBeforeLoad() { final KnowledgeBase kb = new KnowledgeBase(); @@ -338,7 +338,7 @@ public void testStatementsListRemovalBeforeLoad() { assertEquals(ReasonerState.KB_NOT_LOADED, reasoner.getReasonerState()); } } - + @Test public void testStatementsArrayRemovalBeforeLoad() { final KnowledgeBase kb = new KnowledgeBase(); @@ -348,7 +348,7 @@ public void testStatementsArrayRemovalBeforeLoad() { assertEquals(ReasonerState.KB_NOT_LOADED, reasoner.getReasonerState()); } } - + @Test public void testStatementRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -359,7 +359,7 @@ public void testStatementRemovalAfterLoad() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementsListRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -370,7 +370,7 @@ public void testStatementsListRemovalAfterLoad() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementsArrayRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -381,7 +381,7 @@ public void testStatementsArrayRemovalAfterLoad() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementRemovalBeforeReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -392,7 +392,7 @@ public void testStatementRemovalBeforeReason() throws IOException { assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); } } - + @Test public void testStatementsListRemovalBeforeReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -403,7 +403,7 @@ public void testStatementsListRemovalBeforeReason() throws IOException { assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); } } - + @Test public void testStatementsArrayRemovalBeforeReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -414,7 +414,7 @@ public void testStatementsArrayRemovalBeforeReason() throws IOException { assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); } } - + @Test public void testStatementRemovalAfterReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -425,7 +425,7 @@ public void testStatementRemovalAfterReason() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementsListRemovalAfterReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -436,7 +436,7 @@ public void testStatementsListRemovalAfterReason() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementsArrayRemovalAfterReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -447,7 +447,7 @@ public void testStatementsArrayRemovalAfterReason() throws IOException { assertEquals(ReasonerState.KB_CHANGED, 
reasoner.getReasonerState()); } } - + @Test public void testStatementNotRemovedAfterReason() throws IOException { final Fact newFact = Expressions.makeFact("newPred", c); @@ -459,7 +459,7 @@ public void testStatementNotRemovedAfterReason() throws IOException { assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); } } - + @Test public void testStatementsListNotRemovedAfterReason() throws IOException { final Fact newFact = Expressions.makeFact("newPred", c); @@ -471,7 +471,7 @@ public void testStatementsListNotRemovedAfterReason() throws IOException { assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); } } - + @Test public void testStatementsArrayListNotRemovedAfterReason() throws IOException { final Fact newFact = Expressions.makeFact("newPred", c); @@ -484,5 +484,4 @@ public void testStatementsArrayListNotRemovedAfterReason() throws IOException { } } - } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java index d9246a1e3..0a80eb198 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java @@ -65,20 +65,18 @@ public void testLanguageStringConversion() { Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } - + @Test public void testNamedNullConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, - "_123"); + karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_123"); Term vLog4jTerm = new NamedNullImpl("_123"); Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } - + @Test(expected = IllegalArgumentException.class) public void testVariableConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "X"); + karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "X"); VLogToModelConverter.toTerm(vLogTerm); } From 3dbb6918616e18f49c176bb69c0620320859ccff Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 7 Nov 2019 15:02:32 +0100 Subject: [PATCH 0343/1003] added OwlFeatureNotSupportedException for SWRL rules --- .../org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java index ac158bae0..e50f3c12b 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java @@ -520,7 +520,7 @@ public void visit(final OWLHasKeyAxiom axiom) { @Override public void visit(final SWRLRule rule) { - // TODO support SWRL rules + throw new OwlFeatureNotSupportedException("SWRLRule currently not supported."); } From 12e2cb8db2cfc150d86f4f1983c0a8a01991b0d9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 7 Nov 2019 15:37:14 +0100 Subject: [PATCH 0344/1003] OWLObjectOneOf in body converter --- 
 .../vlog4j/owlapi/ClassToRuleBodyConverter.java | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java
index e6b19f79e..28265f47a 100644
--- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java
+++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java
@@ -153,13 +153,7 @@ public void visit(final OWLObjectHasSelf ce) {
 
 	@Override
 	public void visit(final OWLObjectOneOf ce) {
-		// ce.individuals().forEach(individual -> {
-		// final Term individualTerm =
-		// OwlToRulesConversionHelper.getIndividualTerm(individual);
-		// replaceTerm(this.body, individualTerm);
-		// replaceTerm(this.head, individualTerm);
-		// });
-		// TODO Auto-generated method parenstub
+		throw new RuntimeException("This should never occur: BodyConverter for "+ce);
 	}
 

From 0a3be6c275345642ff4954e1cb301cc655618fd9 Mon Sep 17 00:00:00 2001
From: Irina Dragoste
Date: Thu, 7 Nov 2019 15:37:30 +0100
Subject: [PATCH 0345/1003] OWLObjectOneOf in body converter

---
 .../owlapi/OwlAxiomToRulesConverter.java | 28 +++++++++----------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java
index e50f3c12b..13dab444a 100644
--- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java
+++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java
@@ -221,24 +221,24 @@ void startAxiomConversion() {
 	 * @param superClass
 	 */
 	void addSubClassAxiom(final OWLClassExpression subClass, final OWLClassExpression superClass) {
-		this.startAxiomConversion();
-
-		final ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(this.frontierVariable, this);
-		superClass.accept(headConverter);
-		final ClassToRuleBodyConverter bodyConverter = new ClassToRuleBodyConverter(this.frontierVariable,
-				headConverter.body, headConverter.head, this);
-		bodyConverter.handleDisjunction(subClass, this.frontierVariable);
-		this.addRule(bodyConverter);
+		if (subClass instanceof OWLObjectOneOf) {
+			final OWLObjectOneOf subClassInstaceOf = (OWLObjectOneOf) subClass;
+			subClassInstaceOf.individuals().forEach(individual -> visitClassAssertionAxiom(individual, superClass));
+		} else {
+			this.startAxiomConversion();
+
+			final ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(this.frontierVariable, this);
+			superClass.accept(headConverter);
+			final ClassToRuleBodyConverter bodyConverter = new ClassToRuleBodyConverter(this.frontierVariable,
+					headConverter.body, headConverter.head, this);
+			bodyConverter.handleDisjunction(subClass, this.frontierVariable);
+			this.addRule(bodyConverter);
+		}
 	}
 
 	@Override
 	public void visit(final OWLSubClassOfAxiom axiom) {
-		if (axiom.getSubClass() instanceof OWLObjectOneOf) {
-			final OWLObjectOneOf subClass = (OWLObjectOneOf) axiom.getSubClass();
-			subClass.individuals().forEach(individual -> visitClassAssertionAxiom(individual, axiom.getSuperClass()));
-		} else {
-			this.addSubClassAxiom(axiom.getSubClass(), axiom.getSuperClass());
-		}
+		this.addSubClassAxiom(axiom.getSubClass(), axiom.getSuperClass());
 	}
 
 	@Override

From bd9058bc6beb0bcfcd5fe90e02b8d7b878c226d9 Mon Sep 17 00:00:00 2001
From: alloka
Date: Thu, 7
Nov 2019 16:14:49 +0100 Subject: [PATCH 0346/1003] fixed some styles --- .../model/implementation/ConjunctionImpl.java | 2 +- .../DataSourceDeclarationImpl.java | 5 +- .../core/model/implementation/Serializer.java | 46 +++++++++++++++---- .../core/model/ConjunctionImplTest.java | 2 +- .../core/model/DataSourceDeclarationTest.java | 2 +- .../vlog4j/core/model/FactTest.java | 2 +- .../vlog4j/core/model/TermImplTest.java | 12 ++--- 7 files changed, 48 insertions(+), 23 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index 69df24198..735fea4e9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -95,7 +95,7 @@ public String toString() { } else { stringBuilder.append(", "); } - stringBuilder.append(Serializer.getLiteralString((AbstractLiteralImpl) literal)); + stringBuilder.append(Serializer.getLiteralString(literal)); } return stringBuilder.toString(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java index 217538610..a40eae7b7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java @@ -77,7 +77,7 @@ public boolean equals(final Object obj) { return (this.predicate.equals(other.getPredicate())) && this.dataSource.equals(other.getDataSource()); } - + @Override public T accept(StatementVisitor statementVisitor) { return statementVisitor.visit(this); @@ -85,8 +85,7 @@ public T accept(StatementVisitor statementVisitor) { @Override public String toString() { - return "@source " + Serializer.getPredicateString(this.predicate) + "(" + this.predicate.getArity() + ") : " - + this.dataSource.toConfigString() + " ."; + return Serializer.getDataSourceDeclarationString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 01496eb18..cd5bab3aa 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -24,16 +24,22 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; +import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.UniversalVariable; import org.semanticweb.vlog4j.core.model.api.Variable; /** - * 
Simple class implementation of various toString methods to ensure the correct - * parsable string output of the different Data models. + * A utility class with static methods to obtain the correct parsable string + * representation of the different data models. * * @author Ali Elhalawati * @@ -44,11 +50,11 @@ private Serializer() { } - public static String getRuleString(RuleImpl rule) { + public static String getRuleString(Rule rule) { return rule.getHead() + " :- " + rule.getBody() + "."; } - public static String getLiteralString(AbstractLiteralImpl literal) { + public static String getLiteralString(Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { stringBuilder.append("~"); @@ -67,27 +73,27 @@ public static String getLiteralString(AbstractLiteralImpl literal) { return stringBuilder.toString(); } - public static String getConstantString(AbstractConstantImpl constant) { + public static String getConstantString(Constant constant) { return constant.getName(); } - public static String getExistentialVarString(ExistentialVariableImpl existentialvariable) { + public static String getExistentialVarString(ExistentialVariable existentialvariable) { return "!" + existentialvariable.getName(); } - public static String getUniversalVarString(UniversalVariableImpl universalvariable) { + public static String getUniversalVarString(UniversalVariable universalvariable) { return "?" + universalvariable.getName(); } - public static String getDatatypeConstantString(DatatypeConstantImpl datatypeconstant) { + public static String getDatatypeConstantString(DatatypeConstant datatypeconstant) { return datatypeconstant.getName(); } - public static String getNamedNullString(NamedNullImpl namednull) { + public static String getNamedNullString(NamedNull namednull) { return "_" + namednull.getName(); } - public static String getLanguageConstantString(LanguageStringConstantImpl languagestringconstant) { + public static String getLanguageConstantString(LanguageStringConstant languagestringconstant) { return languagestringconstant.getName(); } @@ -95,4 +101,24 @@ public static String getPredicateString(Predicate predicate) { return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; } + public static String getDataSourceDeclarationString(DataSourceDeclaration datasourcedeclaration) { + return "@source " + getPredicateString(datasourcedeclaration.getPredicate()) + "(" + + datasourcedeclaration.getPredicate().getArity() + ") : " + + datasourcedeclaration.getDataSource().toConfigString() + " ."; + } + + public static String getConjunctionString(Conjunction conjunction) { + final StringBuilder stringBuilder = new StringBuilder(); + boolean first = true; + for (final Literal literal : conjunction.getLiterals()) { + if (first) { + first = false; + } else { + stringBuilder.append(", "); + } + stringBuilder.append(getLiteralString(literal)); + } + return stringBuilder.toString(); + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java index 4df0c8f68..c3c632c77 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java @@ -182,7 +182,7 @@ public void negativeLiteralsNoNullElements() { } @Test - public void testconjunctiontoString() { + public void conjunctiontToStringTest() { 
+	public void conjunctiontToStringTest() {
 		final Variable x = Expressions.makeUniversalVariable("X");
 		final Variable y = Expressions.makeUniversalVariable("Y");
 		final Constant c = Expressions.makeAbstractConstant("c");
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java
index 4e35fcf32..3091216d5 100644
--- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java
+++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java
@@ -63,7 +63,7 @@ public void equalityTest() throws MalformedURLException {
 	}
 
 	@Test
-	public void testdataSourceDecalarationtoString() throws MalformedURLException {
+	public void dataSourceDecalarationToStringTest() throws MalformedURLException {
 		DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var",
 				"?var wdt:P31 wd:Q5 .");
 		Predicate predicate1 = Expressions.makePredicate("p", 3);
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java
index d2da2f1de..7f39dd875 100644
--- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java
+++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java
@@ -55,7 +55,7 @@ public void factsOnlyContainConstants() {
 	}
 
 	@Test
-	public void testtoString() {
+	public void factToStringTest() {
 		final Predicate p = Expressions.makePredicate("p", 2);
 		final Constant c = Expressions.makeAbstractConstant("c");
 		final Constant d = Expressions.makeAbstractConstant("d");
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java
index 1c9e98f1f..2a218a6a2 100644
--- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java
+++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java
@@ -133,37 +133,37 @@ public void namedNullGetterTest() {
 	}
 
 	@Test
-	public void testabstractConstantImpltoString() {
+	public void abstractConstantToStringTest() {
 		AbstractConstantImpl c = new AbstractConstantImpl("c");
 		assertEquals("c", c.toString());
 	}
 
 	@Test
-	public void testdatatypeConstantImpltoString() {
+	public void datatypeConstantToStringTest() {
 		DatatypeConstantImpl c = new DatatypeConstantImpl("c", "http://example.org/mystring");
 		assertEquals("\"c\"^^<http://example.org/mystring>", c.toString());
 	}
 
 	@Test
-	public void testlanguageStringConstantImpltoString() {
+	public void languageStringConstantToStringTest() {
 		LanguageStringConstantImpl c = new LanguageStringConstantImpl("Test", "en");
 		assertEquals("\"Test\"@en", c.toString());
 	}
 
 	@Test
-	public void testuniversalVariabletoString() {
+	public void universalVariableToStringTest() {
 		UniversalVariableImpl v = new UniversalVariableImpl("v");
 		assertEquals("?v", v.toString());
 	}
 
 	@Test
-	public void testexistentialVariabletoString() {
+	public void existentialVariableToStringTest() {
 		ExistentialVariableImpl v = new ExistentialVariableImpl("v");
 		assertEquals("!v", v.toString());
 	}
 
 	@Test
-	public void testnamedNulltoString() {
+	public void namedNullToStringTest() {
 		NamedNullImpl n = new NamedNullImpl("123");
 		assertEquals("_123", n.toString());
 	}

From 8825fa8a85d51a2adff94dc5f5359fbbc2cc8a02 Mon Sep 17 00:00:00 2001
From: alloka
Date: Thu, 7 Nov 2019 16:47:41 +0100
Subject: [PATCH 0347/1003] changed some styles

---
.../vlog4j/core/model/implementation/FactImpl.java | 5 ++--- .../vlog4j/core/model/implementation/Serializer.java | 2 +- .../java/org/semanticweb/vlog4j/core/model/RuleImplTest.java | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index b08939577..63a931e99 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -47,11 +47,10 @@ public FactImpl(Predicate predicate, List terms) { public T accept(StatementVisitor statementVisitor) { return statementVisitor.visit(this); } - + @Override public String toString() { - return Serializer.getLiteralString(this)+"."; + return Serializer.getLiteralString(this) + "."; } - } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index cd5bab3aa..3cffd0168 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -106,7 +106,7 @@ public static String getDataSourceDeclarationString(DataSourceDeclaration dataso + datasourcedeclaration.getPredicate().getArity() + ") : " + datasourcedeclaration.getDataSource().toConfigString() + " ."; } - + public static String getConjunctionString(Conjunction conjunction) { final StringBuilder stringBuilder = new StringBuilder(); boolean first = true; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index 7c05f8b61..ef5dce07b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -139,7 +139,7 @@ public void noUnsafeVariables() { } @Test - public void testtoString() { + public void ruleToStringTest() { final Variable x = Expressions.makeUniversalVariable("X"); final Variable y = Expressions.makeExistentialVariable("Y"); final Variable z = Expressions.makeUniversalVariable("Z"); From be2656160aa4a01e18f82430c3a726e619775ea4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 8 Nov 2019 14:11:20 +0100 Subject: [PATCH 0348/1003] draft support nominals in complex class expressions --- .../owlapi/ClassToRuleBodyConverter.java | 27 +++- .../vlog4j/owlapi/RulesHelper.java | 42 +++++++ .../owlapi/OwlAxiomToRulesConverterTest.java | 115 ++++++++++++++---- .../vlog4j/owlapi/TestRulesHelper.java | 25 ++++ 4 files changed, 181 insertions(+), 28 deletions(-) create mode 100644 vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java create mode 100644 vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java index 28265f47a..01ea37ff9 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java @@ -1,5 +1,7 @@ 
package org.semanticweb.vlog4j.owlapi; +import java.util.ArrayList; + /*- * #%L * VLog4j OWL API Support @@ -21,6 +23,8 @@ */ import java.util.Arrays; +import java.util.List; +import java.util.function.UnaryOperator; import java.util.stream.Collectors; import org.semanticweb.owlapi.model.OWLClass; @@ -42,6 +46,7 @@ import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; @@ -153,8 +158,26 @@ public void visit(final OWLObjectHasSelf ce) { @Override public void visit(final OWLObjectOneOf ce) { - throw new RuntimeException("This should never occur: BodyConverter for "+ce); - + System.out.println("Body"); + System.out.println(this.body.getConjuncts()); + System.out.println("Head"); + System.out.println(this.head.getConjuncts()); + + System.out.println("Parent body: " + parent.rules); + + ce.individuals().forEach(individual -> { + final Term individualTerm = OwlToRulesConversionHelper.getIndividualTerm(individual); + + if (this.body.exists()) { + SimpleConjunction newBody = RulesHelper.replaceTerm(this.body, this.mainTerm, individualTerm); + System.out.println("New Body: " + newBody); + } + if (this.head.exists()) { + SimpleConjunction newHead = RulesHelper.replaceTerm(this.head, this.mainTerm, individualTerm); + System.out.println("New Head: " + newHead); + } + + }); } @Override diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java new file mode 100644 index 000000000..74e9c1822 --- /dev/null +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java @@ -0,0 +1,42 @@ +package org.semanticweb.vlog4j.owlapi; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.UnaryOperator; + +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.vlog4j.owlapi.AbstractClassToRuleConverter.SimpleConjunction; + +public final class RulesHelper { + + private RulesHelper() {} + + static SimpleConjunction replaceTerm(SimpleConjunction conjunction, Term mainTerm, Term individualTerm) { + SimpleConjunction newSimpleConjunction = new SimpleConjunction(); + conjunction.getConjuncts().forEach(conjunct -> { + PositiveLiteral newLiteral = replaceTerm(conjunct, mainTerm, individualTerm); + newSimpleConjunction.add(newLiteral); + }); + return newSimpleConjunction; + } + + static PositiveLiteral replaceTerm(PositiveLiteral positiveLiteral, Term sourceTerm, Term targetTerm) { + + List arguments = positiveLiteral.getArguments(); + List modifiableArguments = replaceTerm(sourceTerm, targetTerm, arguments); + + return new PositiveLiteralImpl(positiveLiteral.getPredicate(), modifiableArguments); + } + + static List replaceTerm(Term sourceTerm, Term targetTerm, List terms) { + List newTerms = new ArrayList<>(terms); + + UnaryOperator replaceSourceTerm = term -> term.equals(sourceTerm) ? 
targetTerm : term; + newTerms.replaceAll(replaceSourceTerm); + + return newTerms; + } + +} diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java index 6e10442ac..ac5184f91 100644 --- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java +++ b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java @@ -36,11 +36,13 @@ import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; +import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; @@ -72,29 +74,33 @@ public static Predicate getPropertyPredicate(final String localName) { return Expressions.makePredicate("http://example.org/" + localName, 2); } - static OWLClass cA = getOwlClass("A"); - static OWLClass cB = getOwlClass("B"); - static OWLClass cC = getOwlClass("C"); - static OWLClass cD = getOwlClass("D"); - static OWLClass cE = getOwlClass("E"); - static OWLObjectProperty pR = getOwlObjectProperty("Rule"); - static OWLObjectProperty pS = getOwlObjectProperty("S"); - static OWLObjectProperty pT = getOwlObjectProperty("T"); - static OWLObjectProperty pU = getOwlObjectProperty("U"); - - static Predicate nA = getClassPredicate("A"); - static Predicate nB = getClassPredicate("B"); - static Predicate nC = getClassPredicate("C"); - static Predicate nD = getClassPredicate("D"); - static Predicate nE = getClassPredicate("E"); - static Predicate nR = getPropertyPredicate("Rule"); - static Predicate nS = getPropertyPredicate("S"); - static Predicate nT = getPropertyPredicate("T"); - static Predicate nU = getPropertyPredicate("U"); - - static OWLIndividual inda = df.getOWLNamedIndividual(getIri("a")); - static OWLIndividual indb = df.getOWLNamedIndividual(getIri("b")); - static OWLIndividual indc = df.getOWLNamedIndividual(getIri("c")); + static final OWLClass cA = getOwlClass("A"); + static final OWLClass cB = getOwlClass("B"); + static final OWLClass cC = getOwlClass("C"); + static final OWLClass cD = getOwlClass("D"); + static final OWLClass cE = getOwlClass("E"); + static final OWLObjectProperty pR = getOwlObjectProperty("Rule"); + static final OWLObjectProperty pS = getOwlObjectProperty("S"); + static final OWLObjectProperty pT = getOwlObjectProperty("T"); + static final OWLObjectProperty pU = getOwlObjectProperty("U"); + + static final Predicate nA = getClassPredicate("A"); + static final Predicate nB = getClassPredicate("B"); + static final Predicate nC = getClassPredicate("C"); + static final Predicate nD = getClassPredicate("D"); + static final Predicate nE = getClassPredicate("E"); + static final Predicate nR = getPropertyPredicate("Rule"); + static final Predicate nS = getPropertyPredicate("S"); + static final Predicate nT = getPropertyPredicate("T"); + static final Predicate nU = getPropertyPredicate("U"); + + static final 
OWLIndividual inda = df.getOWLNamedIndividual(getIri("a")); + static final OWLIndividual indb = df.getOWLNamedIndividual(getIri("b")); + static final OWLIndividual indc = df.getOWLNamedIndividual(getIri("c")); + + static final Term consta = Expressions.makeAbstractConstant(getIri("a").toString()); + static final Term constb = Expressions.makeAbstractConstant(getIri("b").toString()); + static final Term constc = Expressions.makeAbstractConstant(getIri("c").toString()); @Test public void testSimpleRule() { @@ -387,9 +393,7 @@ public void testClassAssertions() { Ca.accept(converter); BandhasRba.accept(converter); - final Term consta = Expressions.makeAbstractConstant(getIri("a").toString()); - final Term constb = Expressions.makeAbstractConstant(getIri("b").toString()); - final Term constc = Expressions.makeAbstractConstant(getIri("c").toString()); + final PositiveLiteral atC = Expressions.makePositiveLiteral(nC, constc); final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, consta); final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, consta, constb); @@ -632,6 +636,65 @@ public void testObjectPropertyRange() { assertEquals(Collections.singleton(rule), converter.rules); } + @Test + public void testNominalSubClassOfClass() { + OWLObjectOneOf oneOfa = df.getOWLObjectOneOf(inda); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(oneOfa, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Fact expectedFact = Expressions.makeFact(nA, consta); + assertEquals(Collections.singleton(expectedFact), converter.facts); + assertTrue(converter.rules.isEmpty()); + } + + @Test + public void testNominalsSubClassOfClass() { + OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(oneOfab, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Fact expectedFact1 = Expressions.makeFact(nA, consta); + final Fact expectedFact2 = Expressions.makeFact(nA, constb); + + assertEquals(Sets.newSet(expectedFact1,expectedFact2), converter.facts); + assertTrue(converter.rules.isEmpty()); + } + + @Test + public void testNominalsInConjunctionSubClassOfClass() { + OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(oneOfab,cB); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(conjunction, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); +//TODO + System.out.println(converter.rules); + + } + + @Test(expected = OwlFeatureNotSupportedException.class) + public void testNominalSuperClassOfClass() { + OWLObjectOneOf oneOfa = df.getOWLObjectOneOf(inda); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, oneOfa); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + @Test(expected = OwlFeatureNotSupportedException.class) + public void testNominalsSuperClassOfClass() { + OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA,oneOfab); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + @Ignore public void test() { final OWLObjectPropertyExpression Sinv = df.getOWLObjectInverseOf(pS); diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java 
b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java new file mode 100644 index 000000000..f7cee4055 --- /dev/null +++ b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java @@ -0,0 +1,25 @@ +package org.semanticweb.vlog4j.owlapi; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; + +public class TestRulesHelper { + + @Test + public void testReplaceTerm() { + AbstractConstant c1 = Expressions.makeAbstractConstant("c1"); + UniversalVariable v1 = Expressions.makeUniversalVariable("v1"); + UniversalVariable v2 = Expressions.makeUniversalVariable("v2"); + + PositiveLiteral positiveLiteral = Expressions.makePositiveLiteral("a", v1, v1, v2, c1); + + PositiveLiteral expectedLiteral = Expressions.makePositiveLiteral("a", c1, c1, v2, c1); + assertEquals(expectedLiteral, RulesHelper.replaceTerm(positiveLiteral, v1, c1)); + } + +} From 40ac51a18d53f69f13a4bfcc2f72e9a9f64be1fd Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 8 Nov 2019 21:21:20 +0100 Subject: [PATCH 0349/1003] fixed styles and duplicates --- .../model/implementation/ConjunctionImpl.java | 12 +------- .../implementation/DatatypeConstantImpl.java | 2 +- .../LanguageStringConstantImpl.java | 2 +- .../model/implementation/PredicateImpl.java | 8 ++--- .../core/model/implementation/Serializer.java | 30 +++++++------------ .../core/model/ConjunctionImplTest.java | 2 +- 6 files changed, 18 insertions(+), 38 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index 735fea4e9..c28374ad7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -87,17 +87,7 @@ public Iterator iterator() { @Override public String toString() { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final T literal : this.literals) { - if (first) { - first = false; - } else { - stringBuilder.append(", "); - } - stringBuilder.append(Serializer.getLiteralString(literal)); - } - return stringBuilder.toString(); + return Serializer.getConjunctionString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java index 907758747..0f7d85135 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java @@ -60,7 +60,7 @@ public String getLexicalValue() { @Override public String toString() { - return Serializer.getDatatypeConstantString(this); + return Serializer.getConstantString(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java index 084143187..7e8657216 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java @@ -84,7 +84,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return Serializer.getLanguageConstantString(this); + return Serializer.getConstantString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java index 3ad38a877..27d632c6d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java @@ -40,10 +40,8 @@ public class PredicateImpl implements Predicate { /** * Constructor for {@link Predicate}s of arity 1 or higher. * - * @param name - * a non-blank String (not null, nor empty or whitespace). - * @param arity - * an int value strictly greater than 0. + * @param name a non-blank String (not null, nor empty or whitespace). + * @param arity an int value strictly greater than 0. */ public PredicateImpl(@NonNull String name, int arity) { Validate.notBlank(name, "Predicates cannot be named by blank Strings."); @@ -89,7 +87,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return "PredicateImpl [name=" + this.name + ", arity=" + this.arity + "]"; + return Serializer.getPredicateString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 3cffd0168..839db2ff9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -77,37 +77,29 @@ public static String getConstantString(Constant constant) { return constant.getName(); } - public static String getExistentialVarString(ExistentialVariable existentialvariable) { - return "!" + existentialvariable.getName(); + public static String getExistentialVarString(ExistentialVariable existentialVariable) { + return "!" + existentialVariable.getName(); } - public static String getUniversalVarString(UniversalVariable universalvariable) { - return "?" + universalvariable.getName(); + public static String getUniversalVarString(UniversalVariable universalVariable) { + return "?" 
+ universalVariable.getName(); } - public static String getDatatypeConstantString(DatatypeConstant datatypeconstant) { - return datatypeconstant.getName(); - } - - public static String getNamedNullString(NamedNull namednull) { - return "_" + namednull.getName(); - } - - public static String getLanguageConstantString(LanguageStringConstant languagestringconstant) { - return languagestringconstant.getName(); + public static String getNamedNullString(NamedNull namedNull) { + return "_" + namedNull.getName(); } public static String getPredicateString(Predicate predicate) { return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; } - public static String getDataSourceDeclarationString(DataSourceDeclaration datasourcedeclaration) { - return "@source " + getPredicateString(datasourcedeclaration.getPredicate()) + "(" - + datasourcedeclaration.getPredicate().getArity() + ") : " - + datasourcedeclaration.getDataSource().toConfigString() + " ."; + public static String getDataSourceDeclarationString(DataSourceDeclaration dataSourceDeclaration) { + return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" + + dataSourceDeclaration.getPredicate().getArity() + ") : " + + dataSourceDeclaration.getDataSource().toConfigString() + " ."; } - public static String getConjunctionString(Conjunction conjunction) { + public static String getConjunctionString(Conjunction conjunction) { final StringBuilder stringBuilder = new StringBuilder(); boolean first = true; for (final Literal literal : conjunction.getLiterals()) { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java index c3c632c77..3394d45cb 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java @@ -182,7 +182,7 @@ public void negativeLiteralsNoNullElements() { } @Test - public void conjunctiontToStringTest() { + public void conjunctionToStringTest() { final Variable x = Expressions.makeUniversalVariable("X"); final Variable y = Expressions.makeUniversalVariable("Y"); final Constant c = Expressions.makeAbstractConstant("c"); From 33890bf622096205d899252ede54a9d6a16c100e Mon Sep 17 00:00:00 2001 From: alloka Date: Sun, 10 Nov 2019 20:37:08 +0100 Subject: [PATCH 0350/1003] added more tests and modified serializer conjunction --- .../core/model/implementation/Serializer.java | 2 +- .../vlog4j/core/model/ConjunctionImplTest.java | 10 ++++++---- .../vlog4j/core/model/RuleImplTest.java | 18 +++++++++++++++++- 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 839db2ff9..8ab8b7455 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -51,7 +51,7 @@ private Serializer() { } public static String getRuleString(Rule rule) { - return rule.getHead() + " :- " + rule.getBody() + "."; + return getConjunctionString(rule.getHead()) + " :- " + getConjunctionString(rule.getBody()) + "."; } public static String getLiteralString(Literal literal) { diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java index 3394d45cb..db8fdcf30 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java @@ -190,10 +190,12 @@ public void conjunctionToStringTest() { final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y, x); final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); - final List positiveLiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, - positiveLiteral3); - final Conjunction conjunction1 = new ConjunctionImpl<>(positiveLiteralList); - assertEquals("p(?X, c), p(?Y, ?X), q(?X, d)", conjunction1.toString()); + final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", x, d); + final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, d); + final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, + NegativeLiteral, PositiveLiteral4); + final Conjunction conjunction1 = new ConjunctionImpl<>(LiteralList); + assertEquals("p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, d)", conjunction1.toString()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index ef5dce07b..a2214fc92 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -23,13 +23,18 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; +import java.util.Arrays; +import java.util.List; + import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; @@ -143,15 +148,26 @@ public void ruleToStringTest() { final Variable x = Expressions.makeUniversalVariable("X"); final Variable y = Expressions.makeExistentialVariable("Y"); final Variable z = Expressions.makeUniversalVariable("Z"); + final Variable y2 = Expressions.makeUniversalVariable("Y"); + final Constant d = Expressions.makeAbstractConstant("d"); final Constant c = Expressions.makeAbstractConstant("c"); final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, z); final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, y); + final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y2, x); + final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); + final NegativeLiteral NegativeLiteral = 
Expressions.makeNegativeLiteral("r", x, d); + final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, d); + final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, + NegativeLiteral, PositiveLiteral4); final Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); final Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); - final Conjunction bodyPositiveLiterals = Expressions.makePositiveConjunction(atom1, atom2); + final Conjunction bodyConjunction = new ConjunctionImpl<>(LiteralList); final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z).", rule1.toString()); + assertEquals("q(?X, !Y) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, d).", rule2.toString()); } From 706a3f212c8760b4d4afb2b976e9a6f8ca600e92 Mon Sep 17 00:00:00 2001 From: alloka Date: Sun, 10 Nov 2019 20:44:18 +0100 Subject: [PATCH 0351/1003] added languagestringconstant in rule test --- .../org/semanticweb/vlog4j/core/model/RuleImplTest.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index a2214fc92..461a439f1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -36,6 +36,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; public class RuleImplTest { @@ -151,6 +152,7 @@ public void ruleToStringTest() { final Variable y2 = Expressions.makeUniversalVariable("Y"); final Constant d = Expressions.makeAbstractConstant("d"); final Constant c = Expressions.makeAbstractConstant("c"); + LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, z); final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, y); @@ -158,7 +160,7 @@ public void ruleToStringTest() { final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y2, x); final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", x, d); - final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, d); + final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, s); final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, NegativeLiteral, PositiveLiteral4); final Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); @@ -167,7 +169,7 @@ public void ruleToStringTest() { final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z).", rule1.toString()); - assertEquals("q(?X, !Y) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, d).", 
rule2.toString());
 	}
 
 }

From d6e07b44a077b708ff8f7314119d90f02c7469c1 Mon Sep 17 00:00:00 2001
From: Irina Dragoste
Date: Mon, 11 Nov 2019 17:40:11 +0100
Subject: [PATCH 0352/1003] throw Not Supported exception for encountering nominals in complex class expressions

---
 .../owlapi/ClassToRuleBodyConverter.java | 28 +-------
 .../owlapi/OwlAxiomToRulesConverterTest.java | 65 +++++++++++++++----
 2 files changed, 56 insertions(+), 37 deletions(-)

diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java
index 01ea37ff9..027ebb4af 100644
--- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java
+++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java
@@ -1,7 +1,5 @@
 package org.semanticweb.vlog4j.owlapi;
 
-import java.util.ArrayList;
-
 /*-
  * #%L
  * VLog4j OWL API Support
@@ -23,8 +21,6 @@
  */
 
 import java.util.Arrays;
-import java.util.List;
-import java.util.function.UnaryOperator;
 import java.util.stream.Collectors;
 
 import org.semanticweb.owlapi.model.OWLClass;
@@ -46,7 +42,6 @@
 import org.semanticweb.owlapi.model.OWLObjectOneOf;
 import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom;
 import org.semanticweb.owlapi.model.OWLObjectUnionOf;
-import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
 import org.semanticweb.vlog4j.core.model.api.Predicate;
 import org.semanticweb.vlog4j.core.model.api.Term;
 import org.semanticweb.vlog4j.core.model.api.Variable;
@@ -156,28 +151,11 @@ public void visit(final OWLObjectHasSelf ce) {
 				this.body);
 	}
 
+	// TODO support this feature
 	@Override
 	public void visit(final OWLObjectOneOf ce) {
-		System.out.println("Body");
-		System.out.println(this.body.getConjuncts());
-		System.out.println("Head");
-		System.out.println(this.head.getConjuncts());
-
-		System.out.println("Parent body: " + parent.rules);
-
-		ce.individuals().forEach(individual -> {
-			final Term individualTerm = OwlToRulesConversionHelper.getIndividualTerm(individual);
-
-			if (this.body.exists()) {
-				SimpleConjunction newBody = RulesHelper.replaceTerm(this.body, this.mainTerm, individualTerm);
-				System.out.println("New Body: " + newBody);
-			}
-			if (this.head.exists()) {
-				SimpleConjunction newHead = RulesHelper.replaceTerm(this.head, this.mainTerm, individualTerm);
-				System.out.println("New Head: " + newHead);
-			}
-
-		});
+		throw new OwlFeatureNotSupportedException(
+				"OWLObjectOneOf in complex class expressions currently not supported!");
 	}
 
 	@Override
diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java
index ac5184f91..5873e4585 100644
--- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java
+++ b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java
@@ -97,7 +97,7 @@ public static Predicate getPropertyPredicate(final String localName) {
 	static final OWLIndividual inda = df.getOWLNamedIndividual(getIri("a"));
 	static final OWLIndividual indb = df.getOWLNamedIndividual(getIri("b"));
 	static final OWLIndividual indc = df.getOWLNamedIndividual(getIri("c"));
-	
+
 	static final Term consta = Expressions.makeAbstractConstant(getIri("a").toString());
 	static final Term constb =
Expressions.makeAbstractConstant(getIri("b").toString()); static final Term constc = Expressions.makeAbstractConstant(getIri("c").toString()); @@ -393,7 +393,6 @@ public void testClassAssertions() { Ca.accept(converter); BandhasRba.accept(converter); - final PositiveLiteral atC = Expressions.makePositiveLiteral(nC, constc); final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, consta); final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, consta, constb); @@ -636,6 +635,9 @@ public void testObjectPropertyRange() { assertEquals(Collections.singleton(rule), converter.rules); } + /* + * {a} \sqsubseteq A + */ @Test public void testNominalSubClassOfClass() { OWLObjectOneOf oneOfa = df.getOWLObjectOneOf(inda); @@ -643,12 +645,15 @@ public void testNominalSubClassOfClass() { final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); - + final Fact expectedFact = Expressions.makeFact(nA, consta); assertEquals(Collections.singleton(expectedFact), converter.facts); assertTrue(converter.rules.isEmpty()); } + /* + * {a,b} \sqsubseteq A + */ @Test public void testNominalsSubClassOfClass() { OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); @@ -660,23 +665,56 @@ public void testNominalsSubClassOfClass() { final Fact expectedFact1 = Expressions.makeFact(nA, consta); final Fact expectedFact2 = Expressions.makeFact(nA, constb); - assertEquals(Sets.newSet(expectedFact1,expectedFact2), converter.facts); + assertEquals(Sets.newSet(expectedFact1, expectedFact2), converter.facts); assertTrue(converter.rules.isEmpty()); } + + /* + * ({a,b} \sqcap B) \sqsubseteq A + */ + @Test(expected = OwlFeatureNotSupportedException.class) + // TODO support this feature + public void testNominalsInConjunctionLeftSubClassOfClass() { + OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(oneOfab, cB); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(conjunction, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } - @Test - public void testNominalsInConjunctionSubClassOfClass() { + /* + * (B \sqcap {a,b}) \sqsubseteq A + */ + @Test(expected = OwlFeatureNotSupportedException.class) + // TODO support this feature + public void testNominalsInConjunctionRightSubClassOfClass() { OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); - OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(oneOfab,cB); + OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(cB, oneOfab); OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(conjunction, cA); final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); -//TODO - System.out.println(converter.rules); - } + + /* + * A \sqsubseteq (B \sqcap {a,b}) + */ + @Test(expected = OwlFeatureNotSupportedException.class) + public void testClassSubClassOfNominalsInConjunctionRight() { + OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(cB, oneOfab); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, conjunction); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + + /* + * A \sqsubseteq {a} + */ @Test(expected = OwlFeatureNotSupportedException.class) public void testNominalSuperClassOfClass() { OWLObjectOneOf oneOfa = df.getOWLObjectOneOf(inda); @@ -685,11 +723,14 @@ public void 
testNominalSuperClassOfClass() { final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); } - + + /* + * A \sqsubseteq {a,b} + */ @Test(expected = OwlFeatureNotSupportedException.class) public void testNominalsSuperClassOfClass() { OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA,oneOfab); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, oneOfab); final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); From f39d071fff386c9456dc44e30b0e93f7bd47a039 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 11 Nov 2019 17:44:54 +0100 Subject: [PATCH 0353/1003] remove unused helper class. --- .../vlog4j/owlapi/RulesHelper.java | 42 ------------------- .../vlog4j/owlapi/TestRulesHelper.java | 25 ----------- 2 files changed, 67 deletions(-) delete mode 100644 vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java delete mode 100644 vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java deleted file mode 100644 index 74e9c1822..000000000 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.semanticweb.vlog4j.owlapi; - -import java.util.ArrayList; -import java.util.List; -import java.util.function.UnaryOperator; - -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.owlapi.AbstractClassToRuleConverter.SimpleConjunction; - -public final class RulesHelper { - - private RulesHelper() {} - - static SimpleConjunction replaceTerm(SimpleConjunction conjunction, Term mainTerm, Term individualTerm) { - SimpleConjunction newSimpleConjunction = new SimpleConjunction(); - conjunction.getConjuncts().forEach(conjunct -> { - PositiveLiteral newLiteral = replaceTerm(conjunct, mainTerm, individualTerm); - newSimpleConjunction.add(newLiteral); - }); - return newSimpleConjunction; - } - - static PositiveLiteral replaceTerm(PositiveLiteral positiveLiteral, Term sourceTerm, Term targetTerm) { - - List arguments = positiveLiteral.getArguments(); - List modifiableArguments = replaceTerm(sourceTerm, targetTerm, arguments); - - return new PositiveLiteralImpl(positiveLiteral.getPredicate(), modifiableArguments); - } - - static List replaceTerm(Term sourceTerm, Term targetTerm, List terms) { - List newTerms = new ArrayList<>(terms); - - UnaryOperator replaceSourceTerm = term -> term.equals(sourceTerm) ? 
targetTerm : term; - newTerms.replaceAll(replaceSourceTerm); - - return newTerms; - } - -} diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java deleted file mode 100644 index f7cee4055..000000000 --- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java +++ /dev/null @@ -1,25 +0,0 @@ -package org.semanticweb.vlog4j.owlapi; - -import static org.junit.Assert.assertEquals; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; - -public class TestRulesHelper { - - @Test - public void testReplaceTerm() { - AbstractConstant c1 = Expressions.makeAbstractConstant("c1"); - UniversalVariable v1 = Expressions.makeUniversalVariable("v1"); - UniversalVariable v2 = Expressions.makeUniversalVariable("v2"); - - PositiveLiteral positiveLiteral = Expressions.makePositiveLiteral("a", v1, v1, v2, c1); - - PositiveLiteral expectedLiteral = Expressions.makePositiveLiteral("a", c1, c1, v2, c1); - assertEquals(expectedLiteral, RulesHelper.replaceTerm(positiveLiteral, v1, c1)); - } - -} From 0e9868bc8091ad9699ae7ee4b524643fd7c42cec Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 11 Nov 2019 17:47:07 +0100 Subject: [PATCH 0354/1003] added getSyntacticRepresentation in every data model and moved any string manipulation in models to serializer --- .../picocli/VLog4jClientMaterialize.java | 1 - .../core/model/api/DataSourceDeclaration.java | 2 +- .../vlog4j/core/model/api/Entity.java | 32 ++++++++++++ .../vlog4j/core/model/api/Literal.java | 2 +- .../vlog4j/core/model/api/Predicate.java | 2 +- .../vlog4j/core/model/api/Rule.java | 2 +- .../vlog4j/core/model/api/Term.java | 2 +- .../implementation/AbstractConstantImpl.java | 14 ++++-- .../implementation/AbstractLiteralImpl.java | 6 ++- .../model/implementation/ConjunctionImpl.java | 6 ++- .../DataSourceDeclarationImpl.java | 6 ++- .../implementation/DatatypeConstantImpl.java | 8 ++- .../ExistentialVariableImpl.java | 6 ++- .../core/model/implementation/FactImpl.java | 2 +- .../LanguageStringConstantImpl.java | 8 ++- .../model/implementation/NamedNullImpl.java | 6 ++- .../model/implementation/PredicateImpl.java | 6 ++- .../core/model/implementation/RuleImpl.java | 7 ++- .../core/model/implementation/Serializer.java | 50 ++++++++++++++----- .../implementation/UniversalVariableImpl.java | 6 ++- .../reasoner/implementation/VLogReasoner.java | 23 ++++++--- 21 files changed, 154 insertions(+), 43 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java index 7758a0065..969d7d0f7 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -37,7 +37,6 @@ import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; - import picocli.CommandLine.ArgGroup; import picocli.CommandLine.Command; import picocli.CommandLine.Option; diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java index 58bea5785..dbd97c360 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java @@ -27,7 +27,7 @@ * @author Markus Kroetzsch * */ -public interface DataSourceDeclaration extends Statement { +public interface DataSourceDeclaration extends Statement, SyntacticRepresentation{ /** * Returns the {@link Predicate} that this source applies to. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java new file mode 100644 index 000000000..a68806024 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -0,0 +1,32 @@ +package org.semanticweb.vlog4j.core.model.api; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + +public interface SyntacticRepresentation { + + String getSyntacticRepresentation(); + + + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java index a5b5340d8..53d4980b9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java @@ -31,7 +31,7 @@ * @author david.carral@tu-dresden.de * @author Irina Dragoste */ -public interface Literal extends SyntaxObject { +public interface Literal extends SyntaxObject, SyntacticRepresentation { boolean isNegated(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java index aaa126fa6..82a4126bb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java @@ -29,7 +29,7 @@ * @author Irina Dragoste * */ -public interface Predicate { +public interface Predicate extends SyntacticRepresentation { /** * The name of the Predicate. 
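For orientation, the pattern this patch introduces across the data model is: each syntax object exposes getSyntacticRepresentation(), the implementation classes delegate it to the static Serializer, and toString() simply forwards to it. The following is a minimal usage sketch, not part of the patch; the class name is illustrative and the expected output mirrors the literal-serialization assertions in the unit tests added later in this series.

    import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
    import org.semanticweb.vlog4j.core.model.implementation.Expressions;

    public class SerializationSketch {
        public static void main(String[] args) {
            // Build the literal p(?X, c) from a universal variable and an abstract constant.
            PositiveLiteral literal = Expressions.makePositiveLiteral("p",
                    Expressions.makeUniversalVariable("X"),
                    Expressions.makeAbstractConstant("c"));
            // toString() now forwards to getSyntacticRepresentation(), which uses the Serializer.
            System.out.println(literal.getSyntacticRepresentation()); // prints: p(?X, c)
            System.out.println(literal);                              // prints the same string
        }
    }
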
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java index a471a1cab..ebc937fed 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java @@ -30,7 +30,7 @@ * @author Markus Krötzsch * */ -public interface Rule extends SyntaxObject, Statement { +public interface Rule extends SyntaxObject, Statement, SyntacticRepresentation { /** * Returns the conjunction of head literals (the consequence of the rule). diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java index 1cf3fdbb8..11c2ef6e8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java @@ -27,7 +27,7 @@ * @author david.carral@tu-dresden.de * @author Markus Krötzsch */ -public interface Term { +public interface Term extends SyntacticRepresentation { /** * Returns the name this term. The name uniquely identifies terms of the same diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java index 86e97a061..140c6312e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java @@ -31,10 +31,10 @@ public class AbstractConstantImpl extends AbstractTermImpl implements AbstractConstant { /** - * Instantiates a {@code ConstantImpl} object with the name {@code name}. + * Instantiates a {@code ConstantImpl} object with the name + * {@code name}. * - * @param name - * cannot be a blank String (null, empty or whitespace). + * @param name cannot be a blank String (null, empty or whitespace). 
*/ public AbstractConstantImpl(final String name) { super(name); @@ -44,9 +44,13 @@ public AbstractConstantImpl(final String name) { public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - + + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getConstantString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java index 80bd418e6..a912bb6f6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java @@ -92,9 +92,13 @@ public boolean equals(final Object obj) { && this.getArguments().equals(other.getArguments()); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getLiteralString(this); + return getSyntacticRepresentation(); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index c28374ad7..3295d7d1a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -85,9 +85,13 @@ public Iterator iterator() { return getLiterals().iterator(); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getConjunctionString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java index a40eae7b7..ce0edbf1d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java @@ -83,9 +83,13 @@ public T accept(StatementVisitor statementVisitor) { return statementVisitor.visit(this); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getDataSourceDeclarationString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java index 0f7d85135..9242ae321 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java @@ -58,14 +58,18 @@ public String getLexicalValue() { return this.lexicalValue; } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getConstantString(this); + return 
getSyntacticRepresentation(); } @Override public String getName() { - return "\"" + lexicalValue.replace("\\", "\\\\").replace("\"", "\\\"") + "\"^^<" + datatype + ">"; + return Serializer.getDatatypeConstantName(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java index a570bd615..33092db87 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java @@ -44,8 +44,12 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getExistentialVarString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index 63a931e99..2fc3d3d81 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -50,7 +50,7 @@ public T accept(StatementVisitor statementVisitor) { @Override public String toString() { - return Serializer.getLiteralString(this) + "."; + return Serializer.getString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java index 7e8657216..31ca5e440 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java @@ -44,7 +44,7 @@ public LanguageStringConstantImpl(String string, String languageTag) { @Override public String getName() { - return "\"" + string.replace("\\", "\\\\").replace("\"", "\\\"") + "\"@" + lang; + return Serializer.getLanguageStringConstantName(this); } @Override @@ -82,9 +82,13 @@ public boolean equals(Object obj) { return this.string.equals(other.getString()) && this.lang.equals(other.getLanguageTag()); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getConstantString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java index d6b1d2e9b..0b4e44640 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java @@ -46,8 +46,12 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getNamedNullString(this); + return getSyntacticRepresentation(); } } diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java index 27d632c6d..85e9230df 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java @@ -85,9 +85,13 @@ public boolean equals(Object obj) { return this.arity == other.getArity() && this.name.equals(other.getName()); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getPredicateString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java index c83e79a30..8eabef7a1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java @@ -103,9 +103,14 @@ public boolean equals(final Object obj) { return this.head.equals(other.getHead()) && this.body.equals(other.getBody()); } + + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getRuleString(this); + return getSyntacticRepresentation(); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 8ab8b7455..68bf4e407 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -50,11 +50,11 @@ private Serializer() { } - public static String getRuleString(Rule rule) { - return getConjunctionString(rule.getHead()) + " :- " + getConjunctionString(rule.getBody()) + "."; + public static String getString(Rule rule) { + return getString(rule.getHead()) + " :- " + getString(rule.getBody()) + "."; } - public static String getLiteralString(Literal literal) { + public static String getString(Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { stringBuilder.append("~"); @@ -67,39 +67,55 @@ public static String getLiteralString(Literal literal) { } else { stringBuilder.append(", "); } - stringBuilder.append(term); + stringBuilder.append(term.getSyntacticRepresentation()); } stringBuilder.append(")"); return stringBuilder.toString(); } - public static String getConstantString(Constant constant) { + public static String getString(Fact fact) { + final StringBuilder stringBuilder = new StringBuilder(""); + stringBuilder.append(fact.getPredicate().getName()).append("("); + boolean first = true; + for (final Term term : fact.getArguments()) { + if (first) { + first = false; + } else { + stringBuilder.append(", "); + } + stringBuilder.append(term.getSyntacticRepresentation()); + } + stringBuilder.append(")."); + return stringBuilder.toString(); + } + + public static String getString(Constant constant) { return constant.getName(); } - public static String getExistentialVarString(ExistentialVariable existentialVariable) { + public static String getString(ExistentialVariable 
existentialVariable) { return "!" + existentialVariable.getName(); } - public static String getUniversalVarString(UniversalVariable universalVariable) { + public static String getString(UniversalVariable universalVariable) { return "?" + universalVariable.getName(); } - public static String getNamedNullString(NamedNull namedNull) { + public static String getString(NamedNull namedNull) { return "_" + namedNull.getName(); } - public static String getPredicateString(Predicate predicate) { + public static String getString(Predicate predicate) { return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; } - public static String getDataSourceDeclarationString(DataSourceDeclaration dataSourceDeclaration) { + public static String getString(DataSourceDeclaration dataSourceDeclaration) { return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" + dataSourceDeclaration.getPredicate().getArity() + ") : " + dataSourceDeclaration.getDataSource().toConfigString() + " ."; } - public static String getConjunctionString(Conjunction conjunction) { + public static String getString(Conjunction conjunction) { final StringBuilder stringBuilder = new StringBuilder(); boolean first = true; for (final Literal literal : conjunction.getLiterals()) { @@ -108,9 +124,19 @@ public static String getConjunctionString(Conjunction conjunc } else { stringBuilder.append(", "); } - stringBuilder.append(getLiteralString(literal)); + stringBuilder.append(getString(literal)); } return stringBuilder.toString(); } + public static String getLanguageStringConstantName(LanguageStringConstant languageStringConstant) { + return "\"" + languageStringConstant.getString().replace("\\", "\\\\").replace("\"", "\\\"") + "\"@" + + languageStringConstant.getLanguageTag(); + } + + public static String getDatatypeConstantName(DatatypeConstant datatypeConstant) { + return "\"" + datatypeConstant.getLexicalValue().replace("\\", "\\\\").replace("\"", "\\\"") + "\"^^<" + + datatypeConstant.getDatatype() + ">"; + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java index 838eab7f3..82493488d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java @@ -44,8 +44,12 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getUniversalVarString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index d355e17f2..89a530237 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import 
org.semanticweb.vlog4j.core.model.implementation.Serializer; import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; @@ -129,6 +130,15 @@ public boolean equals(Object obj) { return predicate.equals(other.predicate); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + + @Override + public String toString() { + return getSyntacticRepresentation(); + } + } /** @@ -740,19 +750,18 @@ public void onStatementsAdded(List statementsAdded) { // TODO more elaborate materialisation state handling updateReasonerToKnowledgeBaseChanged(); - - //updateCorrectnessOnStatementsAdded(statementsAdded); + + // updateCorrectnessOnStatementsAdded(statementsAdded); updateCorrectness(); } - @Override public void onStatementAdded(Statement statementAdded) { // TODO more elaborate materialisation state handling updateReasonerToKnowledgeBaseChanged(); - - //updateCorrectnessOnStatementAdded(statementAdded); + + // updateCorrectnessOnStatementAdded(statementAdded); updateCorrectness(); } @@ -766,9 +775,9 @@ private void updateReasonerToKnowledgeBaseChanged() { private void updateCorrectness() { if (this.reasonerState == ReasonerState.KB_CHANGED) { - + final boolean noRules = this.knowledgeBase.getRules().isEmpty(); - this.correctness = noRules? Correctness.SOUND_BUT_INCOMPLETE : Correctness.INCORRECT; + this.correctness = noRules ? Correctness.SOUND_BUT_INCOMPLETE : Correctness.INCORRECT; } } From 08642b206b0046927facba9a75796251f80f2b78 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 11 Nov 2019 18:06:35 +0100 Subject: [PATCH 0355/1003] added unit test for checking bug#104 --- .../vlog4j/owlapi/OwlAxiomToRulesConverterTest.java | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java index 6e10442ac..badeed248 100644 --- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java +++ b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java @@ -631,6 +631,19 @@ public void testObjectPropertyRange() { assertEquals(Collections.singleton(rule), converter.rules); } + + /* + * A \sqsubseteq <1 .R + */ + @Test(expected = OwlFeatureNotSupportedException.class) + public void testSubClassOfMaxCardinality() { + + OWLClassExpression maxCard = df.getOWLObjectMaxCardinality(1, pR); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, maxCard ); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } @Ignore public void test() { From 6849afdeaadddee075b633f98a96cbf0f04fbb5e Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 11 Nov 2019 19:23:53 +0100 Subject: [PATCH 0356/1003] added more coverage tests --- .../vlog4j/core/model/api/DataSourceDeclaration.java | 2 +- .../semanticweb/vlog4j/core/model/api/Entity.java | 2 +- .../semanticweb/vlog4j/core/model/api/Literal.java | 4 +++- .../semanticweb/vlog4j/core/model/api/Predicate.java | 2 +- .../org/semanticweb/vlog4j/core/model/api/Rule.java | 2 +- .../org/semanticweb/vlog4j/core/model/api/Term.java | 2 +- .../vlog4j/core/model/NegativeLiteralImplTest.java | 11 +++++++++++ .../vlog4j/core/model/PositiveLiteralImplTest.java | 12 ++++++++++++ 
.../vlog4j/core/model/PredicateImplTest.java | 6 ++++++ 9 files changed, 37 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java index dbd97c360..357c85ed8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java @@ -27,7 +27,7 @@ * @author Markus Kroetzsch * */ -public interface DataSourceDeclaration extends Statement, SyntacticRepresentation{ +public interface DataSourceDeclaration extends Statement, Entity{ /** * Returns the {@link Predicate} that this source applies to. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index a68806024..e8b2bfcd9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -23,7 +23,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Serializer; -public interface SyntacticRepresentation { +public interface Entity { String getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java index 53d4980b9..fab5c530a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java @@ -22,6 +22,8 @@ import java.util.List; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for literals. A positive literal is simply an atomic formula, i.e., * a formula of the form P(t1,...,tn) where P is a {@link Predicate} of arity n @@ -31,7 +33,7 @@ * @author david.carral@tu-dresden.de * @author Irina Dragoste */ -public interface Literal extends SyntaxObject, SyntacticRepresentation { +public interface Literal extends SyntaxObject, Entity { boolean isNegated(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java index 82a4126bb..87bd036c7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java @@ -29,7 +29,7 @@ * @author Irina Dragoste * */ -public interface Predicate extends SyntacticRepresentation { +public interface Predicate extends Entity { /** * The name of the Predicate. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java index ebc937fed..0eb6cc325 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java @@ -30,7 +30,7 @@ * @author Markus Krötzsch * */ -public interface Rule extends SyntaxObject, Statement, SyntacticRepresentation { +public interface Rule extends SyntaxObject, Statement, Entity { /** * Returns the conjunction of head literals (the consequence of the rule). 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java index 11c2ef6e8..582493c69 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java @@ -27,7 +27,7 @@ * @author david.carral@tu-dresden.de * @author Markus Krötzsch */ -public interface Term extends SyntacticRepresentation { +public interface Term extends Entity { /** * Returns the name this term. The name uniquely identifies terms of the same diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java index 8feba86e3..d22881e84 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java @@ -142,4 +142,15 @@ public void termSizeMatchesPredicateArity() { Expressions.makeUniversalVariable("X")); } + @Test + public void negativeLiteralTostringTest() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Predicate predicateP = new PredicateImpl("p", 2); + final Literal atom2 = Expressions.makeNegativeLiteral("p", x, c); + final Literal atom3 = new NegativeLiteralImpl(predicateP, Arrays.asList(x, c)); + assertEquals("~p(?X, c)", atom2.toString()); + assertEquals("~p(?X, c)", atom3.toString()); + + } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java index c358cecdf..ff1d5bbe3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java @@ -139,4 +139,16 @@ public void termSizeMatchesPredicateArity() { Expressions.makeUniversalVariable("X")); } + @Test + public void positiveLiteralTostringTest() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Predicate predicateP = new PredicateImpl("p", 2); + final Literal atom2 = Expressions.makePositiveLiteral("p", x, c); + final Literal atom3 = new PositiveLiteralImpl(predicateP, Arrays.asList(x, c)); + assertEquals("p(?X, c)", atom2.toString()); + assertEquals("p(?X, c)", atom3.toString()); + + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java index 30862eb5e..21f184915 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java @@ -73,4 +73,10 @@ public void arityZero() { new PredicateImpl("p", 0); } + @Test + public void predicateToStringTest() { + final Predicate p1 = new PredicateImpl("p", 1); + assertEquals(" Predicate [ name= p, arity= 1]", p1.toString()); + } + } From 02eae7f5413132fc1ff87930db4e7782b48165cc Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 11 Nov 2019 19:24:39 +0100 Subject: [PATCH 0357/1003] added more coverage tests --- .../java/org/semanticweb/vlog4j/core/model/api/Entity.java | 5 +---- 1 file changed, 1 
insertion(+), 4 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index e8b2bfcd9..c6cfd12f5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -20,13 +20,10 @@ * #L% */ - import org.semanticweb.vlog4j.core.model.implementation.Serializer; public interface Entity { - - String getSyntacticRepresentation(); - + String getSyntacticRepresentation(); } From fa461307bb6acfa3cc38ba227e4fd7fcc1f64ccd Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 12 Nov 2019 00:13:25 +0100 Subject: [PATCH 0358/1003] added toStrings for DataSourceDeclarations --- .../vlog4j/core/model/api/DataSource.java | 2 +- .../core/model/implementation/Serializer.java | 5 ++-- .../implementation/CsvFileDataSource.java | 5 ++++ .../implementation/InMemoryDataSource.java | 6 ++++ .../implementation/RdfFileDataSource.java | 5 ++++ .../SparqlQueryResultDataSource.java | 6 ++++ .../core/model/DataSourceDeclarationTest.java | 30 +++++++++++++++++++ 7 files changed, 55 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java index abaaa9d03..d085716e6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java @@ -26,7 +26,7 @@ * @author Irina Dragoste * */ -public interface DataSource { +public interface DataSource extends Entity { /** * Constructs a String representation of the data source. 
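To show where these new getSyntacticRepresentation() implementations of the data sources end up being used: once the Serializer also prepends the "@source p(n):" prefix (a follow-up adjustment made in the next patch of this series), a data source declaration prints as a parsable statement that the rule parser accepts again. The sketch below is not part of the patch; the CSV path is only an example, and the expected output mirrors the assertions in DataSourceDeclarationTest and the round trip exercised by the new EntityTest.

    import java.io.File;
    import java.io.IOException;

    import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration;
    import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl;
    import org.semanticweb.vlog4j.core.model.implementation.Expressions;
    import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
    import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource;
    import org.semanticweb.vlog4j.parser.ParsingException;
    import org.semanticweb.vlog4j.parser.RuleParser;

    public class DataSourceRoundTripSketch {
        public static void main(String[] args) throws IOException, ParsingException {
            // Declare predicate q/1 to be loaded from a CSV file (example path).
            CsvFileDataSource csv = new CsvFileDataSource(new File("src/test/data/input/file.csv"));
            DataSourceDeclaration declaration = new DataSourceDeclarationImpl(
                    Expressions.makePredicate("q", 1), csv);
            // Prints: @source q(1): load-csv("src/test/data/input/file.csv") .
            System.out.println(declaration);
            // The serialised form can be parsed back into a knowledge base.
            KnowledgeBase kb = new KnowledgeBase();
            RuleParser.parseInto(kb, declaration.toString());
        }
    }
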
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 68bf4e407..0af0baefe 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -24,6 +24,7 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; @@ -110,9 +111,7 @@ public static String getString(Predicate predicate) { } public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" - + dataSourceDeclaration.getPredicate().getArity() + ") : " - + dataSourceDeclaration.getDataSource().toConfigString() + " ."; + return dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } public static String getString(Conjunction conjunction) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java index 6ffafa6d9..2fa42eb07 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java @@ -73,4 +73,9 @@ public String toString() { return "CsvFileDataSource [csvFile=" + getFile() + "]"; } + @Override + public String getSyntacticRepresentation() { + return "load-csv(\"" + getFile() + "\") ."; + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index 0a363f734..c602b613a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -101,4 +101,10 @@ public String toConfigString() { return null; } + @Override + public String getSyntacticRepresentation() { + // TODO Auto-generated method stub + return null; + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java index 5b8ac21d1..ee5cc49ee 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java @@ -71,4 +71,9 @@ public String toString() { return "RdfFileDataSource [rdfFile=" + getFile() + "]"; } + @Override + public String getSyntacticRepresentation() { + return "load-rdf(\"" + getFile() + "\") ."; + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index c4f83be33..29578fbf2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -164,4 +164,10 @@ public String toString() { + ", queryBody=" + queryBody + "]"; } + @Override + public String getSyntacticRepresentation() { + return "Sparql(\"" + endpoint + "\"" + ", \"" + queryVariables + "\"" + + ", \"" + queryBody + "\") ."; + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index 3091216d5..ff34b2c5a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -22,15 +22,23 @@ import static org.junit.Assert.*; +import java.io.File; +import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; +import java.util.Arrays; +import java.util.LinkedHashSet; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; public class DataSourceDeclarationTest { @@ -74,4 +82,26 @@ public void dataSourceDecalarationToStringTest() throws MalformedURLException { DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); assertEquals(dataSourceDeclaration1.toString(), dataSourceDeclaration2.toString()); } + + @Test + public void DataSourceDeclarationToStringTest() throws IOException { + final String csvFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"; + final File unzippedRdfFile = new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"); + Predicate predicate1 = Expressions.makePredicate("p", 3); + Predicate predicate2 = Expressions.makePredicate("q", 1); + final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/"), + "var", "?var wdt:P31 wd:Q5 ."); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); + final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); + final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, + unzippedCsvFileDataSource); + final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, + unzippedRdfFileDataSource); + assertEquals("Sparql(\"https://example.org/\", \"var\", \"?var wdt:P31 wd:Q5 .\") .", + 
dataSourceDeclaration1.toString()); + assertEquals("load-csv(\"src/test/data/input/file.csv\") .", dataSourceDeclaration2.toString()); + assertEquals("load-rdf(\"src/test/data/input/file.nt\") .", dataSourceDeclaration3.toString()); + + } } From 3f82460a78382d9773aca8ef3c4ca676e0206d73 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 12 Nov 2019 19:57:33 +0100 Subject: [PATCH 0359/1003] added EntityTest class for tostring roundtrip tests --- .../vlog4j/core/model/api/Entity.java | 7 + .../core/model/implementation/Serializer.java | 4 +- .../SparqlQueryResultDataSource.java | 32 ++-- .../core/model/DataSourceDeclarationTest.java | 10 +- .../vlog4j/syntax/parser/EntityTest.java | 152 ++++++++++++++++++ 5 files changed, 182 insertions(+), 23 deletions(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index c6cfd12f5..73b5d05ec 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -22,6 +22,13 @@ import org.semanticweb.vlog4j.core.model.implementation.Serializer; +/** + * Interface with getSyntacticRepresentation method that must be extended by any + * data model that can be parsed in order to obtain its correct parsable string. + * + * @author Ali Elhalawati + * + */ public interface Entity { String getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 0af0baefe..a0673a697 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -111,7 +111,9 @@ public static String getString(Predicate predicate) { } public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); + return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" + + dataSourceDeclaration.getPredicate().getArity() + "): " + + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } public static String getString(Conjunction conjunction) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index 29578fbf2..ace59318b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -46,12 +46,11 @@ public class SparqlQueryResultDataSource extends VLogDataSource { /** * Creates a data source from answers to a remote SPARQL query. * - * @param endpoint - * web location of the resource the query will be evaluated on - * @param queryVariables - * comma-separated list of SPARQL variable names (without leading ? 
or $) - * @param queryBody - * content of the WHERE clause in the SPARQL query + * @param endpoint web location of the resource the query will be + * evaluated on + * @param queryVariables comma-separated list of SPARQL variable names (without + * leading ? or $) + * @param queryBody content of the WHERE clause in the SPARQL query */ // TODO add examples to javadoc // TODO add illegal argument exceptions to javadoc @@ -70,15 +69,15 @@ public SparqlQueryResultDataSource(final URL endpoint, final String queryVariabl /** * Creates a data source from answers to a remote SPARQL query. * - * @param endpoint - * the web location of the resource the query will be evaluated on. - * @param queryVariables - * the variables of the query, in the given order. The variable at - * each position in the ordered set will be mapped to its - * correspondent query answer term at the same position. - * @param queryBody - * the content of the WHERE clause in the SPARQL query. Must - * not contain {@code newline} characters ({@code "\n")}. + * @param endpoint the web location of the resource the query will be + * evaluated on. + * @param queryVariables the variables of the query, in the given order. The + * variable at each position in the ordered set will be + * mapped to its correspondent query answer term at the + * same position. + * @param queryBody the content of the WHERE clause in the SPARQL + * query. Must not contain {@code newline} characters + * ({@code "\n")}. */ // TODO add examples to javadoc // TODO add illegal argument exceptions to javadoc @@ -166,8 +165,7 @@ public String toString() { @Override public String getSyntacticRepresentation() { - return "Sparql(\"" + endpoint + "\"" + ", \"" + queryVariables + "\"" - + ", \"" + queryBody + "\") ."; + return "sparql(" + "<" + endpoint + ">" + ", \"" + queryVariables + "\"" + ", \"" + queryBody + "\") ."; } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index ff34b2c5a..e9ab57f5e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -89,8 +89,8 @@ public void DataSourceDeclarationToStringTest() throws IOException { final File unzippedRdfFile = new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"); Predicate predicate1 = Expressions.makePredicate("p", 3); Predicate predicate2 = Expressions.makePredicate("q", 1); - final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/"), - "var", "?var wdt:P31 wd:Q5 ."); + final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource( + new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 ."); final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); @@ -98,10 +98,10 @@ public void DataSourceDeclarationToStringTest() throws IOException { unzippedCsvFileDataSource); final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, unzippedRdfFileDataSource); - assertEquals("Sparql(\"https://example.org/\", \"var\", \"?var wdt:P31 wd:Q5 .\") .", + assertEquals("@source p(3): 
sparql(, \"var\", \"?var wdt:P31 wd:Q5 .\") .", dataSourceDeclaration1.toString()); - assertEquals("load-csv(\"src/test/data/input/file.csv\") .", dataSourceDeclaration2.toString()); - assertEquals("load-rdf(\"src/test/data/input/file.nt\") .", dataSourceDeclaration3.toString()); + assertEquals("@source q(1): load-csv(\"src/test/data/input/file.csv\") .", dataSourceDeclaration2.toString()); + assertEquals("@source q(1): load-rdf(\"src/test/data/input/file.nt\") .", dataSourceDeclaration3.toString()); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java new file mode 100644 index 000000000..e669cf066 --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -0,0 +1,152 @@ +package org.semanticweb.vlog4j.syntax.parser; + +/*- + * #%L + * VLog4j Parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.File; +import java.io.IOException; +import java.net.URL; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.FactImpl; +import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.*; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; + +public class EntityTest { + final Variable x = 
Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Variable z = Expressions.makeExistentialVariable("Z"); + final Variable y2 = Expressions.makeUniversalVariable("Y"); + final Constant d = Expressions.makeAbstractConstant("d"); + final Constant c = Expressions.makeAbstractConstant("c"); + final AbstractConstantImpl f = new AbstractConstantImpl("f"); + final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); + final DatatypeConstantImpl data = new DatatypeConstantImpl("data", "http://example.org/mystring"); + final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); + final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); + final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y2, x); + final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); + final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", x, d); + final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, s); + final Predicate p = Expressions.makePredicate("p", 2); + final Fact f1 = Expressions.makeFact(p, Arrays.asList(f, s)); + final Fact f2 = Expressions.makeFact("p", Arrays.asList(data, d)); + final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, + NegativeLiteral, PositiveLiteral4); + final Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); + final Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); + final Conjunction bodyConjunction = new ConjunctionImpl<>(LiteralList); + final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); + + @Test + public void factToStringRoundTripTest() { + KnowledgeBase kb = new KnowledgeBase(); + KnowledgeBase kb2 = new KnowledgeBase(); + kb.addStatement(f1); + kb2.addStatement(f2); + assertEquals(f1.toString(), kb.getFacts().get(0).toString()); + assertEquals(f2.toString(), kb2.getFacts().get(0).toString()); + } + + @Test + public void literalToStringRoundTripTest() { + KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(rule1); + assertEquals(headAtom1.toString(), rule1.getHead().getLiterals().get(0).toString()); + } + + @Test + public void conjunctionToStringRoundTripTest() { + KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(rule1); + assertEquals(bodyConjunction.toString(), rule2.getBody().toString()); + } + + @Test + public void predicateToStringRoundTripTest() { + KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(rule1); + assertEquals(bodyConjunction.getLiterals().get(0).getPredicate().toString(), p.toString()); + } + + @Test + public void ruleToStringRoundTripTest() { + KnowledgeBase kb = new KnowledgeBase(); + KnowledgeBase kb2 = new KnowledgeBase(); + kb.addStatement(rule1); + kb2.addStatement(rule2); + assertEquals(kb.getRules().get(0).toString(), rule1.toString()); + assertEquals(kb2.getRules().get(0).toString(), rule2.toString()); + } + + @Test + public void dataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + final String INPUT_FOLDER = "src/test/data/input/"; + final String csvFile = INPUT_FOLDER + "file.csv"; + final File unzippedRdfFile = 
new File(INPUT_FOLDER + "file.nt"); + Predicate predicate1 = Expressions.makePredicate("p", 3); + Predicate predicate2 = Expressions.makePredicate("q", 1); + final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource( + new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 ."); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); + final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); + final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, + unzippedCsvFileDataSource); + final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate1, + unzippedRdfFileDataSource); + System.out.println(dataSourceDeclaration1.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration2.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration3.toString()); + } + +} From c436c57e403f9967af2552d1d93a41d7dcc2cb47 Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 13 Nov 2019 04:07:16 +0100 Subject: [PATCH 0360/1003] moved getSyntacticRepresentation to interfaces --- .../core/model/api/AbstractConstant.java | 7 +++++++ .../vlog4j/core/model/api/Conjunction.java | 9 ++++++++- .../core/model/api/DataSourceDeclaration.java | 9 ++++++++- .../core/model/api/DatatypeConstant.java | 7 +++++++ .../vlog4j/core/model/api/Entity.java | 11 +++++----- .../core/model/api/ExistentialVariable.java | 9 ++++++++- .../vlog4j/core/model/api/Fact.java | 7 +++++++ .../model/api/LanguageStringConstant.java | 7 +++++++ .../vlog4j/core/model/api/Literal.java | 5 +++++ .../vlog4j/core/model/api/NamedNull.java | 12 ++++++++--- .../vlog4j/core/model/api/Predicate.java | 7 +++++++ .../vlog4j/core/model/api/Rule.java | 7 +++++++ .../core/model/api/UniversalVariable.java | 7 +++++++ .../implementation/AbstractConstantImpl.java | 4 ---- .../implementation/AbstractLiteralImpl.java | 4 ---- .../model/implementation/ConjunctionImpl.java | 4 ---- .../DataSourceDeclarationImpl.java | 4 ---- .../implementation/DatatypeConstantImpl.java | 4 ---- .../ExistentialVariableImpl.java | 4 ---- .../core/model/implementation/FactImpl.java | 2 +- .../LanguageStringConstantImpl.java | 4 ---- .../model/implementation/NamedNullImpl.java | 4 ---- .../model/implementation/PredicateImpl.java | 4 ---- .../core/model/implementation/RuleImpl.java | 5 ----- .../core/model/implementation/Serializer.java | 20 ++----------------- .../implementation/UniversalVariableImpl.java | 4 ---- .../core/model/DataSourceDeclarationTest.java | 13 ------------ 27 files changed, 96 insertions(+), 88 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java index cc7783ba1..760b74358 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -34,4 +36,9 @@ default TermType getType() { return TermType.ABSTRACT_CONSTANT; } + @Override + default String getSyntacticRepresentation() { + return 
Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java index 4d02bc9b9..550d3fa4e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java @@ -22,6 +22,8 @@ import java.util.List; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for representing conjunctions of {@link Literal}s, i.e., lists of * (negated or positive) atomic formulas that are connected with logical AND. @@ -30,7 +32,7 @@ * @author Markus Krötzsch * */ -public interface Conjunction extends Iterable, SyntaxObject { +public interface Conjunction extends Iterable, SyntaxObject, Entity { /** * Returns the list of literals that are part of this conjunction. @@ -39,4 +41,9 @@ public interface Conjunction extends Iterable, SyntaxObjec */ List getLiterals(); + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java index 357c85ed8..954574e1f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -27,7 +29,7 @@ * @author Markus Kroetzsch * */ -public interface DataSourceDeclaration extends Statement, Entity{ +public interface DataSourceDeclaration extends Statement, Entity { /** * Returns the {@link Predicate} that this source applies to. 
@@ -42,4 +44,9 @@ public interface DataSourceDeclaration extends Statement, Entity{ * @return data source specification */ DataSource getDataSource(); + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java index 390e7a909..3702b7b52 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -57,4 +59,9 @@ default TermType getType() { * @return a non-null string */ String getLexicalValue(); + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index 73b5d05ec..8126a4ca3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -20,17 +20,18 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /** - * Interface with getSyntacticRepresentation method that must be extended by any - * data model that can be parsed in order to obtain its correct parsable string. + * Interface for every parsable Data model that has a string representation * * @author Ali Elhalawati * */ public interface Entity { - + /** + * returns the parsable String representation of an Entity. 
+ * + * @return non-empty String + */ String getSyntacticRepresentation(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java index ad57ed712..d573a7850 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -32,5 +34,10 @@ public interface ExistentialVariable extends Variable { default TermType getType() { return TermType.EXISTENTIAL_VARIABLE; } - + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java index d9943f75e..36e7c1fef 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -29,4 +31,9 @@ */ public interface Fact extends PositiveLiteral, Statement { + @Override + default String getSyntacticRepresentation() { + return Serializer.getFactString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java index a1c24427f..b3694d565 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -60,4 +62,9 @@ default String getDatatype() { * @return a non-empty string */ String getLanguageTag(); + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java index fab5c530a..791615697 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java @@ -52,4 +52,9 @@ public interface Literal extends SyntaxObject, Entity { */ List getArguments(); + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java index 4ca7fbecb..5413b9365 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /* * #%L * VLog4j Core 
Components @@ -28,11 +30,15 @@ * @author david.carral@tu-dresden.de */ public interface NamedNull extends Term { - + @Override default TermType getType() { return TermType.NAMED_NULL; } - -} + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java index 87bd036c7..7b604f289 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -47,4 +49,9 @@ public interface Predicate extends Entity { */ int getArity(); + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java index 0eb6cc325..804524b80 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -46,4 +48,9 @@ public interface Rule extends SyntaxObject, Statement, Entity { */ Conjunction getBody(); + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java index 7827886b8..975620a03 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /* * #%L * VLog4j Core Components @@ -32,4 +34,9 @@ public interface UniversalVariable extends Variable { default TermType getType() { return TermType.UNIVERSAL_VARIABLE; } + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java index 140c6312e..0820e16de 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java @@ -45,10 +45,6 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java index 
a912bb6f6..5e2d141a3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java @@ -92,10 +92,6 @@ public boolean equals(final Object obj) { && this.getArguments().equals(other.getArguments()); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index 3295d7d1a..8167f43c2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -85,10 +85,6 @@ public Iterator iterator() { return getLiterals().iterator(); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java index ce0edbf1d..2a905dcbb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java @@ -83,10 +83,6 @@ public T accept(StatementVisitor statementVisitor) { return statementVisitor.visit(this); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java index 9242ae321..d50693640 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java @@ -58,10 +58,6 @@ public String getLexicalValue() { return this.lexicalValue; } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java index 33092db87..685d273a9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java @@ -44,10 +44,6 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index 2fc3d3d81..d22794133 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -50,7 +50,7 @@ public T accept(StatementVisitor statementVisitor) { @Override public String toString() { - return Serializer.getString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java index 31ca5e440..bb0df83c8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java @@ -82,10 +82,6 @@ public boolean equals(Object obj) { return this.string.equals(other.getString()) && this.lang.equals(other.getLanguageTag()); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java index 0b4e44640..5b3a0adc6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java @@ -46,10 +46,6 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java index 85e9230df..38fac8686 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java @@ -85,10 +85,6 @@ public boolean equals(Object obj) { return this.arity == other.getArity() && this.name.equals(other.getName()); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java index 8eabef7a1..4ffbae0de 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java @@ -103,11 +103,6 @@ public boolean equals(final Object obj) { return this.head.equals(other.getHead()) && this.body.equals(other.getBody()); } - - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index a0673a697..b862c797d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.implementation; -import java.util.List; - /*- * #%L * VLog4j Core Components @@ -24,7 +22,6 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; @@ -36,7 +33,6 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; -import org.semanticweb.vlog4j.core.model.api.Variable; /** * A utility class with static methods to obtain the correct parsable string @@ -74,20 +70,8 @@ public static String getString(Literal literal) { return stringBuilder.toString(); } - public static String getString(Fact fact) { - final StringBuilder stringBuilder = new StringBuilder(""); - stringBuilder.append(fact.getPredicate().getName()).append("("); - boolean first = true; - for (final Term term : fact.getArguments()) { - if (first) { - first = false; - } else { - stringBuilder.append(", "); - } - stringBuilder.append(term.getSyntacticRepresentation()); - } - stringBuilder.append(")."); - return stringBuilder.toString(); + public static String getFactString(Fact fact) { + return getString(fact) + "."; } public static String getString(Constant constant) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java index 82493488d..e21cf3e9c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java @@ -44,10 +44,6 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index e9ab57f5e..982dcd8b3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -70,19 +70,6 @@ public void equalityTest() throws MalformedURLException { assertFalse(dataSourceDeclaration1.equals(null)); // written like this for recording coverage properly } - @Test - public void dataSourceDecalarationToStringTest() throws MalformedURLException { - DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", - "?var wdt:P31 wd:Q5 ."); - Predicate predicate1 = Expressions.makePredicate("p", 
3); - DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); - DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", - "?var wdt:P31 wd:Q5 ."); - Predicate predicate2 = Expressions.makePredicate("p", 3); - DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); - assertEquals(dataSourceDeclaration1.toString(), dataSourceDeclaration2.toString()); - } - @Test public void DataSourceDeclarationToStringTest() throws IOException { final String csvFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"; From b2c72128dfc7ddf1b41d6c912d5fcf741f94f2ab Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 13 Nov 2019 04:10:32 +0100 Subject: [PATCH 0361/1003] added javadoc --- .../java/org/semanticweb/vlog4j/core/model/api/Entity.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index 8126a4ca3..cc58a7806 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -21,14 +21,14 @@ */ /** - * Interface for every parsable Data model that has a string representation + * Interface for every parsable data model that has a string representation * * @author Ali Elhalawati * */ public interface Entity { /** - * returns the parsable String representation of an Entity. + * returns the parsable string representation of an Entity. * * @return non-empty String */ From 4eefbe4e87e105356d69867fb1c009ebebd1970a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 13 Nov 2019 18:42:32 +0100 Subject: [PATCH 0362/1003] add getExtensionSize method and some tests --- .../vlog4j/core/reasoner/Reasoner.java | 3 + .../reasoner/implementation/VLogReasoner.java | 18 +- .../implementation/ExtensionSizeTest.java | 206 ++++++++++++++++++ 3 files changed, 226 insertions(+), 1 deletion(-) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 6c274b820..e37561483 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -377,6 +377,9 @@ public static Reasoner getInstance() { */ long queryAnswerSize(PositiveLiteral query); + // TODO add javadoc, examples + long getExtensionSize(PositiveLiteral literal); + // TODO add examples to query javadoc /** * Evaluates an atomic ({@code query}), and returns the number of implicit facts diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 33f1f428f..cb0b75f75 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -608,7 +608,7 @@ public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = 
ModelToVLogConverter.toVLogAtom(query); - int result = -1; + long result = -1; try { result = this.vLog.querySize(vLogAtom, true, filterBlanks); } catch (NotStartedException e) { @@ -621,6 +621,22 @@ public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { return result; } + @Override + public long getExtensionSize(PositiveLiteral literal) { + validateNotClosed(); + validateKBLoaded("Querying is not alowed before reasoner is loaded!"); + + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(literal); + + long result = 0; + try { + result = this.vLog.getExtensionSize(this.vLog.getPredicateId(vLogAtom.getPredicate())); + } catch (NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } + return result; + } + @Override public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java new file mode 100644 index 000000000..8e16694b0 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java @@ -0,0 +1,206 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; + +public class ExtensionSizeTest { + + private static final Predicate predP = Expressions.makePredicate("P", 1); + private static final Predicate predQ = Expressions.makePredicate("Q", 1); + private static final Predicate predR = Expressions.makePredicate("R", 2); + private static final Variable x = Expressions.makeUniversalVariable("x"); + private static final Variable y = Expressions.makeExistentialVariable("y"); + private static final Constant c = Expressions.makeAbstractConstant("c"); + private static final Constant d = Expressions.makeAbstractConstant("d"); + private static final Constant e = Expressions.makeAbstractConstant("e"); + private static final Constant f = Expressions.makeAbstractConstant("f"); + + private static final PositiveLiteral Px = Expressions.makePositiveLiteral(predP, x); + private static final PositiveLiteral Qx = Expressions.makePositiveLiteral(predQ, x); + private static final PositiveLiteral Qy = Expressions.makePositiveLiteral(predQ, y); + private static final PositiveLiteral Rxy = Expressions.makePositiveLiteral(predR, x, y); + private static final Conjunction conRxyQy = Expressions.makePositiveConjunction(Rxy, Qy); + private static final Conjunction conPx = Expressions.makeConjunction(Px); + + private static final Rule QxPx = Expressions.makeRule(Qx, Px); + private static final Rule RxyQyPx = Expressions.makeRule(conRxyQy, conPx); + + private static final Fact factPc = Expressions.makeFact(predP, c); + private static final Fact factPd = Expressions.makeFact(predP, d); + + private static final Fact factQe = Expressions.makeFact(predQ, e); + private static final Fact factQf = Expressions.makeFact(predQ, f); + + private static final PositiveLiteral Rdy = Expressions.makePositiveLiteral(predR, d, y); + private static final PositiveLiteral Rxd = Expressions.makePositiveLiteral(predR, x, d); + private static final PositiveLiteral Rxe = Expressions.makePositiveLiteral(predR, x, e); + + @Test + public void noFactsnoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.getExtensionSize(Px)); + } + } + + @Test + public void noFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(QxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.getExtensionSize(Px)); + assertEquals(0, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void noFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.getExtensionSize(Px)); + assertEquals(0, 
reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void pFactsNoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(0, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void pFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, QxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(2, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void pFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(2, reasoner.getExtensionSize(Qx)); + assertEquals(2, reasoner.getExtensionSize(Qx)); + assertEquals(2, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void qFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.getExtensionSize(Px)); + assertEquals(2, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void qFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.getExtensionSize(Px)); + assertEquals(2, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void pFactsQFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, QxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(4, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void pFactsQFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(4, reasoner.getExtensionSize(Qx)); + assertEquals(2, reasoner.getExtensionSize(Rxy)); + + assertEquals(2, reasoner.getExtensionSize(Rdy)); + assertEquals(2, reasoner.getExtensionSize(Rxe)); + } + } + + @Test + public void pFactsQFactsExistentialAndUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(6, reasoner.getExtensionSize(Qx)); + assertEquals(2, reasoner.getExtensionSize(Rxy)); + + assertEquals(2, reasoner.getExtensionSize(Rdy)); + assertEquals(2, 
reasoner.getExtensionSize(Rxd)); + } + } +} From 90ab9519265aa0cf4f03b3910ff9bb8f3f13762c Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 15 Nov 2019 15:23:54 +0100 Subject: [PATCH 0363/1003] modified round trip test --- .../vlog4j/syntax/parser/EntityTest.java | 44 ++++--------------- 1 file changed, 8 insertions(+), 36 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index e669cf066..bcf472459 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -86,44 +86,17 @@ public class EntityTest { final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); @Test - public void factToStringRoundTripTest() { - KnowledgeBase kb = new KnowledgeBase(); - KnowledgeBase kb2 = new KnowledgeBase(); - kb.addStatement(f1); - kb2.addStatement(f2); - assertEquals(f1.toString(), kb.getFacts().get(0).toString()); - assertEquals(f2.toString(), kb2.getFacts().get(0).toString()); - } - - @Test - public void literalToStringRoundTripTest() { - KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(rule1); - assertEquals(headAtom1.toString(), rule1.getHead().getLiterals().get(0).toString()); - } - - @Test - public void conjunctionToStringRoundTripTest() { - KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(rule1); - assertEquals(bodyConjunction.toString(), rule2.getBody().toString()); + public void factToStringRoundTripTest() throws ParsingException { + assertEquals(RuleParser.parseFact(f1.toString()), RuleParser.parseFact("p(f, \"Test\"@en).")); + assertEquals(RuleParser.parseFact(f2.toString()), + RuleParser.parseFact("p(\"data\"^^, d).")); } @Test - public void predicateToStringRoundTripTest() { - KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(rule1); - assertEquals(bodyConjunction.getLiterals().get(0).getPredicate().toString(), p.toString()); - } - - @Test - public void ruleToStringRoundTripTest() { - KnowledgeBase kb = new KnowledgeBase(); - KnowledgeBase kb2 = new KnowledgeBase(); - kb.addStatement(rule1); - kb2.addStatement(rule2); - assertEquals(kb.getRules().get(0).toString(), rule1.toString()); - assertEquals(kb2.getRules().get(0).toString(), rule2.toString()); + public void ruleToStringRoundTripTest() throws ParsingException { + assertEquals(RuleParser.parseRule(rule1.toString()), RuleParser.parseRule("q(?X, !Z) :- p(?X, c), p(?X, ?Y).")); + assertEquals(RuleParser.parseRule(rule2.toString()), + RuleParser.parseRule("q(?X, !Z) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en).")); } @Test @@ -143,7 +116,6 @@ public void dataSourceDeclarationToStringParsingTest() throws ParsingException, unzippedCsvFileDataSource); final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate1, unzippedRdfFileDataSource); - System.out.println(dataSourceDeclaration1.toString()); RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); RuleParser.parseInto(kb, dataSourceDeclaration2.toString()); RuleParser.parseInto(kb, dataSourceDeclaration3.toString()); From 85c4addcd9d459218f90b51db8b7544c186cec8d Mon Sep 17 00:00:00 2001 From: alloka Date: Sun, 17 Nov 2019 17:18:22 +0100 Subject: [PATCH 0364/1003] added javadoc and removed unused iomports --- .../vlog4j/core/model/api/Entity.java | 2 +- .../implementation/DatatypeConstantImpl.java | 2 +- .../LanguageStringConstantImpl.java | 
2 +- .../core/model/implementation/Serializer.java | 96 ++++++++++++++++++- .../core/model/DataSourceDeclarationTest.java | 8 +- .../vlog4j/core/model/FactTest.java | 3 +- .../core/model/PositiveLiteralImplTest.java | 1 + .../vlog4j/core/model/PredicateImplTest.java | 2 +- .../vlog4j/core/model/TermImplTest.java | 5 +- .../vlog4j/syntax/parser/EntityTest.java | 6 +- 10 files changed, 105 insertions(+), 22 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index cc58a7806..d5fd0306e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -28,7 +28,7 @@ */ public interface Entity { /** - * returns the parsable string representation of an Entity. + * Returns the parsable string representation of an Entity. * * @return non-empty String */ diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java index d50693640..a366f7fed 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java @@ -65,7 +65,7 @@ public String toString() { @Override public String getName() { - return Serializer.getDatatypeConstantName(this); + return Serializer.getConstantName(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java index bb0df83c8..8d0bb26f3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java @@ -44,7 +44,7 @@ public LanguageStringConstantImpl(String string, String languageTag) { @Override public String getName() { - return Serializer.getLanguageStringConstantName(this); + return Serializer.getConstantName(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index b862c797d..45b0d5fcd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -42,15 +42,31 @@ * */ public final class Serializer { - + /** + * Constructor. + */ private Serializer() { } + /** + * Creates a String representation of a given {@link Rule}. Example: "p(?X) :- + * q(?X,?Y)." + * + * @param rule a {@link Rule} + * @return String representation corresponding to a given {@link Rule}. + */ public static String getString(Rule rule) { return getString(rule.getHead()) + " :- " + getString(rule.getBody()) + "."; } + /** + * Creates a String representation of a given {@link Literal}. Example: + * "~q(?X,?Y)" + * + * @param literal a {@link Literal} + * @return String representation corresponding to a given {@link Literal}. 
+ */ public static String getString(Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { @@ -70,36 +86,92 @@ public static String getString(Literal literal) { return stringBuilder.toString(); } + /** + * Creates a String representation of a given {@link Fact}. Example: "q(a)." + * + * @param fact a {@link Fact} + * @return String representation corresponding to a given {@link Fact}. + */ public static String getFactString(Fact fact) { return getString(fact) + "."; } + /** + * Creates a String representation of a given {@link Constant}. Example: "c" + * + * @param constant a {@link Constant} + * @return String representation corresponding to a given {@link Constant}. + */ public static String getString(Constant constant) { return constant.getName(); } + /** + * Creates a String representation of a given {@link ExistentialVariable}. + * Example: "!X" + * + * @param existentialVariable a {@link ExistentialVariable} + * @return String representation corresponding to a given + * {@link ExistentialVariable}. + */ public static String getString(ExistentialVariable existentialVariable) { return "!" + existentialVariable.getName(); } + /** + * Creates a String representation of a given {@link UniversalVariable}. + * Example: "?X" + * + * @param universalVariable a {@link UniversalVariable} + * @return String representation corresponding to a given + * {@link UniversalVariable}. + */ public static String getString(UniversalVariable universalVariable) { return "?" + universalVariable.getName(); } + /** + * Creates a String representation of a given {@link NamedNull}. Example: "_123" + * + * @param namedNull a {@link NamedNull} + * @return String representation corresponding to a given {@link NamedNull}. + */ public static String getString(NamedNull namedNull) { return "_" + namedNull.getName(); } + /** + * Creates a String representation of a given {@link Predicate}. Example: "p(2)" + * + * @param predicate a {@link Predicate} + * @return String representation corresponding to a given {@link Predicate}. + */ public static String getString(Predicate predicate) { - return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; + return predicate.getName() + "(" + predicate.getArity() + ")"; } + /** + * Creates a String representation of a given {@link DataSourceDeclaration}. + * Example: "@source p(3): sparql(, "var", "?var + * wdt:P31 wd:Q5 .") ." + * + * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @return String representation corresponding to a given + * {@link DataSourceDeclaration}. + */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" + dataSourceDeclaration.getPredicate().getArity() + "): " + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } + /** + * Creates a String representation of a given {@link Conjunction}. Example: + * "p(?X,?Y), ~q(a,?Z)" + * + * @param conjunction a {@link Conjunction} + * @return String representation corresponding to a given {@link Conjunction}. 
+ */ public static String getString(Conjunction conjunction) { final StringBuilder stringBuilder = new StringBuilder(); boolean first = true; @@ -114,12 +186,28 @@ public static String getString(Conjunction conjunction) { return stringBuilder.toString(); } - public static String getLanguageStringConstantName(LanguageStringConstant languageStringConstant) { + /** + * Creates a String representation corresponding to the name of a given + * {@link LanguageStringConstant}. Example: ""Test"@en" + * + * @param languageStringConstant a {@link LanguageStringConstant} + * @return String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + */ + public static String getConstantName(LanguageStringConstant languageStringConstant) { return "\"" + languageStringConstant.getString().replace("\\", "\\\\").replace("\"", "\\\"") + "\"@" + languageStringConstant.getLanguageTag(); } - public static String getDatatypeConstantName(DatatypeConstant datatypeConstant) { + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant}. Example: ""c"^^" + * + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getConstantName(DatatypeConstant datatypeConstant) { return "\"" + datatypeConstant.getLexicalValue().replace("\\", "\\\\").replace("\"", "\\\"") + "\"^^<" + datatypeConstant.getDatatype() + ">"; } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index 982dcd8b3..9ef7ef77d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -19,21 +19,19 @@ * limitations under the License. * #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; -import java.util.Arrays; -import java.util.LinkedHashSet; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java index 7f39dd875..a94cdf86d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java @@ -19,8 +19,7 @@ * limitations under the License. 
* #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import java.util.Arrays; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java index ff1d5bbe3..7c8d791a7 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java @@ -24,6 +24,7 @@ import static org.junit.Assert.assertNotEquals; import java.util.Arrays; + import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java index 21f184915..0c750ebb1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java @@ -76,7 +76,7 @@ public void arityZero() { @Test public void predicateToStringTest() { final Predicate p1 = new PredicateImpl("p", 1); - assertEquals(" Predicate [ name= p, arity= 1]", p1.toString()); + assertEquals("p(1)", p1.toString()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index 2a218a6a2..5f6006c43 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -19,8 +19,9 @@ * limitations under the License. * #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index bcf472459..718630cf5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -19,8 +19,7 @@ * limitations under the License. 
* #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import java.io.File; import java.io.IOException; @@ -44,13 +43,10 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.FactImpl; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.*; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.ParsingException; From b49418f62343c554be0771265d1c4f743a40f7f4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 18 Nov 2019 12:02:36 +0100 Subject: [PATCH 0365/1003] fix variable name --- .../semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java index 13dab444a..09e788f6a 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java @@ -222,8 +222,8 @@ void startAxiomConversion() { */ void addSubClassAxiom(final OWLClassExpression subClass, final OWLClassExpression superClass) { if (subClass instanceof OWLObjectOneOf) { - final OWLObjectOneOf subClassInstaceOf = (OWLObjectOneOf) subClass; - subClassInstaceOf.individuals().forEach(individual -> visitClassAssertionAxiom(individual, superClass)); + final OWLObjectOneOf subClassObjectOneOf = (OWLObjectOneOf) subClass; + subClassObjectOneOf.individuals().forEach(individual -> visitClassAssertionAxiom(individual, superClass)); } else { this.startAxiomConversion(); From 0e21460295d302e8c0fd625d611112b52c12a7ce Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 01:04:49 +0100 Subject: [PATCH 0366/1003] fixed DataypeConstants and added their roundtrip tests --- .../core/model/implementation/Serializer.java | 98 +++++++++++++------ .../reasoner/implementation/VLogReasoner.java | 10 +- .../vlog4j/core/model/TermImplTest.java | 5 +- .../vlog4j/syntax/parser/EntityTest.java | 1 + .../vlog4j/syntax/parser/RuleParserTest.java | 18 ++++ 5 files changed, 94 insertions(+), 38 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 45b0d5fcd..bf205b95f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -30,6 +30,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.Predicate; +import 
org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; @@ -42,6 +43,16 @@ * */ public final class Serializer { + public static final String negativeIdentifier = "~"; + public static final String comma = ","; + public static final String dot = "."; + public static final String existentialIdentifier = "!"; + public static final String universalIdentifier = "?"; + public static final String namedNullIdentifier = "_"; + public static final String openBracket = "("; + public static final String closeBracket = ")"; + public static final String ruleSeparator = ":-"; + /** * Constructor. */ @@ -50,55 +61,58 @@ private Serializer() { } /** - * Creates a String representation of a given {@link Rule}. Example: "p(?X) :- - * q(?X,?Y)." + * Creates a String representation of a given {@link Rule}. * - * @param rule a {@link Rule} + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. + * */ public static String getString(Rule rule) { - return getString(rule.getHead()) + " :- " + getString(rule.getBody()) + "."; + return getString(rule.getHead()) + " " + ruleSeparator + " " + getString(rule.getBody()) + dot; } /** - * Creates a String representation of a given {@link Literal}. Example: - * "~q(?X,?Y)" + * Creates a String representation of a given {@link Literal}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ public static String getString(Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { - stringBuilder.append("~"); + stringBuilder.append(negativeIdentifier); } - stringBuilder.append(literal.getPredicate().getName()).append("("); + stringBuilder.append(literal.getPredicate().getName()).append(openBracket); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { first = false; } else { - stringBuilder.append(", "); + stringBuilder.append(comma + " "); } stringBuilder.append(term.getSyntacticRepresentation()); } - stringBuilder.append(")"); + stringBuilder.append(closeBracket); return stringBuilder.toString(); } /** - * Creates a String representation of a given {@link Fact}. Example: "q(a)." + * Creates a String representation of a given {@link Fact}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ public static String getFactString(Fact fact) { - return getString(fact) + "."; + return getString(fact) + dot; } /** - * Creates a String representation of a given {@link Constant}. Example: "c" + * Creates a String representation of a given {@link Constant}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param constant a {@link Constant} * @return String representation corresponding to a given {@link Constant}. */ @@ -106,69 +120,74 @@ public static String getString(Constant constant) { return constant.getName(); } + public static String getString(DatatypeConstant constant) { + return getShortConstantName(constant); + } + /** * Creates a String representation of a given {@link ExistentialVariable}. 
- * Example: "!X" * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. */ public static String getString(ExistentialVariable existentialVariable) { - return "!" + existentialVariable.getName(); + return existentialIdentifier + existentialVariable.getName(); } /** * Creates a String representation of a given {@link UniversalVariable}. - * Example: "?X" * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. */ public static String getString(UniversalVariable universalVariable) { - return "?" + universalVariable.getName(); + return universalIdentifier + universalVariable.getName(); } /** - * Creates a String representation of a given {@link NamedNull}. Example: "_123" + * Creates a String representation of a given {@link NamedNull}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ public static String getString(NamedNull namedNull) { - return "_" + namedNull.getName(); + return namedNullIdentifier + namedNull.getName(); } /** - * Creates a String representation of a given {@link Predicate}. Example: "p(2)" + * Creates a String representation of a given {@link Predicate}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(Predicate predicate) { - return predicate.getName() + "(" + predicate.getArity() + ")"; + return predicate.getName() + openBracket + predicate.getArity() + closeBracket; } /** * Creates a String representation of a given {@link DataSourceDeclaration}. - * Example: "@source p(3): sparql(, "var", "?var - * wdt:P31 wd:Q5 .") ." * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" - + dataSourceDeclaration.getPredicate().getArity() + "): " + return "@source " + dataSourceDeclaration.getPredicate().getName() + openBracket + + dataSourceDeclaration.getPredicate().getArity() + closeBracket + ": " + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } /** - * Creates a String representation of a given {@link Conjunction}. Example: - * "p(?X,?Y), ~q(a,?Z)" + * Creates a String representation of a given {@link Conjunction}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. 
*/ @@ -179,7 +198,7 @@ public static String getString(Conjunction conjunction) { if (first) { first = false; } else { - stringBuilder.append(", "); + stringBuilder.append(comma + " "); } stringBuilder.append(getString(literal)); } @@ -188,8 +207,9 @@ public static String getString(Conjunction conjunction) { /** * Creates a String representation corresponding to the name of a given - * {@link LanguageStringConstant}. Example: ""Test"@en" + * {@link LanguageStringConstant}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. @@ -201,8 +221,26 @@ public static String getConstantName(LanguageStringConstant languageStringConsta /** * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant}. Example: ""c"^^" + * {@link DatatypeConstant} without an IRI. + * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getShortConstantName(DatatypeConstant datatypeConstant) { + if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { + return "\"" + datatypeConstant.getLexicalValue() + "\""; + } else { + return datatypeConstant.getLexicalValue(); + } + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. 
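The Serializer changes in this patch replace the inline javadoc examples with wiki links, while the parser tests added later in this series exercise the intended contract: a serialized rule, fact or term can be parsed back into an equal object. The following is a minimal sketch of that round trip, assuming the vlog4j-core and vlog4j-parser modules of this repository are on the classpath; the class name SerializerRoundTripSketch is illustrative only and not part of the patch.

import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.api.Rule;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.RuleParser;

public class SerializerRoundTripSketch {
	public static void main(final String[] args) throws ParsingException {
		// Build the rule q(?X, !Z) :- p(?X, c) programmatically.
		final PositiveLiteral body = Expressions.makePositiveLiteral("p",
				Expressions.makeUniversalVariable("X"), Expressions.makeAbstractConstant("c"));
		final PositiveLiteral head = Expressions.makePositiveLiteral("q",
				Expressions.makeUniversalVariable("X"), Expressions.makeExistentialVariable("Z"));
		final Rule rule = Expressions.makeRule(head, body);

		// toString() delegates to Serializer; the output should be parseable again,
		// e.g. "q(?X, !Z) :- p(?X, c)." with the ":-" separator and trailing dot.
		final String serialized = rule.toString();
		final Rule reparsed = RuleParser.parseRule(serialized);
		System.out.println(rule.equals(reparsed)); // expected: true
	}
}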
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 89a530237..6bda54351 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -11,6 +11,7 @@ import java.util.Map; import java.util.Set; +import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; @@ -28,7 +29,6 @@ import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; @@ -130,13 +130,11 @@ public boolean equals(Object obj) { return predicate.equals(other.predicate); } + @Override public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override - public String toString() { - return getSyntacticRepresentation(); + throw new NotImplementedException( + "This method is not implemented for type LocalFactsDataSourceDeclaration"); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index 5f6006c43..c34cefd8b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -26,6 +26,7 @@ import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.TermType; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; @@ -141,8 +142,8 @@ public void abstractConstantToStringTest() { @Test public void datatypeConstantToStringTest() { - DatatypeConstantImpl c = new DatatypeConstantImpl("c", "http://example.org/mystring"); - assertEquals("\"c\"^^", c.toString()); + DatatypeConstantImpl c = new DatatypeConstantImpl("c", PrefixDeclarations.XSD_STRING); + assertEquals("\"c\"", c.toString()); } @Test diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 718630cf5..8d6121926 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -31,6 +31,7 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.Fact; import 
org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index de2fc73b6..7896177e2 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -41,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; @@ -411,4 +412,21 @@ public void testBlankPredicateName() throws ParsingException { RuleParser.parse(input); } + @Test + public void DatatypeConstantgRoundTripTest() throws ParsingException { + DatatypeConstantImpl datatypeConstantString = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); + DatatypeConstantImpl datatypeConstantInteger = new DatatypeConstantImpl("1", PrefixDeclarations.XSD_INTEGER); + DatatypeConstantImpl datatypeConstantFloat = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_FLOAT); + DatatypeConstantImpl datatypeConstantDouble = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_DOUBLE); + assertEquals(datatypeConstantString, + RuleParser.parseFact("p(\"data\"^^).").getArguments().get(0)); + assertEquals(datatypeConstantInteger, + RuleParser.parseFact("p(\"1\"^^).").getArguments().get(0)); + assertEquals(datatypeConstantFloat, + RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); + assertEquals(datatypeConstantDouble, + RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); + + } + } From 613f7b9e4f3d101d582bca538db805f64791a254 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 12:53:55 +0100 Subject: [PATCH 0367/1003] added roundtrip tests for datatypeConstants --- .../core/model/implementation/Serializer.java | 10 ++++++++- .../vlog4j/core/model/TermImplTest.java | 6 ++--- .../vlog4j/syntax/parser/EntityTest.java | 22 +++++++++++++++++-- 3 files changed, 32 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index bf205b95f..cfe79dc6f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -232,7 +232,15 @@ public static String getShortConstantName(DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { return "\"" + datatypeConstant.getLexicalValue() + "\""; } else { - return datatypeConstant.getLexicalValue(); + if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_FLOAT)) { + return 
datatypeConstant.getLexicalValue(); + } else { + return getConstantName(datatypeConstant); + } + } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index c34cefd8b..81bf20c96 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -97,10 +97,10 @@ public void abstractConstantGetterTest() { @Test public void datatypeConstantGetterTest() { - DatatypeConstant c = new DatatypeConstantImpl("c", "http://example.org/mystring"); + DatatypeConstant c = new DatatypeConstantImpl("c", PrefixDeclarations.XSD_STRING); assertEquals("c", c.getLexicalValue()); - assertEquals("http://example.org/mystring", c.getDatatype()); - assertEquals("\"c\"^^", c.getName()); + assertEquals("http://www.w3.org/2001/XMLSchema#string", c.getDatatype()); + assertEquals("\"c\"^^", c.getName()); assertEquals(TermType.DATATYPE_CONSTANT, c.getType()); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 8d6121926..62039c855 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -37,6 +37,7 @@ import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; @@ -62,7 +63,7 @@ public class EntityTest { final Constant c = Expressions.makeAbstractConstant("c"); final AbstractConstantImpl f = new AbstractConstantImpl("f"); final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); - final DatatypeConstantImpl data = new DatatypeConstantImpl("data", "http://example.org/mystring"); + final DatatypeConstantImpl data = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); @@ -86,7 +87,7 @@ public class EntityTest { public void factToStringRoundTripTest() throws ParsingException { assertEquals(RuleParser.parseFact(f1.toString()), RuleParser.parseFact("p(f, \"Test\"@en).")); assertEquals(RuleParser.parseFact(f2.toString()), - RuleParser.parseFact("p(\"data\"^^, d).")); + RuleParser.parseFact("p(\"data\"^^, d).")); } @Test @@ -96,6 +97,23 @@ public void ruleToStringRoundTripTest() throws ParsingException { RuleParser.parseRule("q(?X, !Z) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en).")); } + @Test + public void DatatypeConstantgRoundTripTest() throws ParsingException { + DatatypeConstantImpl datatypeConstantString = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); + DatatypeConstantImpl datatypeConstantInteger = new DatatypeConstantImpl("1", PrefixDeclarations.XSD_INTEGER); + DatatypeConstantImpl datatypeConstantFloat = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_FLOAT); + 
DatatypeConstantImpl datatypeConstantDouble = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_DOUBLE); + assertEquals("\"data\"", RuleParser.parseFact("p(\"data\"^^).") + .getArguments().get(0).toString()); + assertEquals("1", RuleParser.parseFact("p(\"1\"^^).").getArguments() + .get(0).toString()); + assertEquals("0.5", RuleParser.parseFact("p(\"0.5\"^^).").getArguments() + .get(0).toString()); + assertEquals("0.5", RuleParser.parseFact("p(\"0.5\"^^).") + .getArguments().get(0).toString()); + + } + @Test public void dataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); From 81bca7044ceea22a6dc4f595a41c916636f200df Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 15:21:45 +0100 Subject: [PATCH 0368/1003] added roundtrip tests --- .../core/model/implementation/Serializer.java | 17 +- .../vlog4j/syntax/parser/EntityTest.java | 202 ++++++++++++------ .../vlog4j/syntax/parser/RuleParserTest.java | 3 + 3 files changed, 150 insertions(+), 72 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index cfe79dc6f..f53ca7088 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -49,8 +49,8 @@ public final class Serializer { public static final String existentialIdentifier = "!"; public static final String universalIdentifier = "?"; public static final String namedNullIdentifier = "_"; - public static final String openBracket = "("; - public static final String closeBracket = ")"; + public static final String openParentheses = "("; + public static final String closeParentheses = ")"; public static final String ruleSeparator = ":-"; /** @@ -84,7 +84,7 @@ public static String getString(Literal literal) { if (literal.isNegated()) { stringBuilder.append(negativeIdentifier); } - stringBuilder.append(literal.getPredicate().getName()).append(openBracket); + stringBuilder.append(literal.getPredicate().getName()).append(openParentheses); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { @@ -94,7 +94,7 @@ public static String getString(Literal literal) { } stringBuilder.append(term.getSyntacticRepresentation()); } - stringBuilder.append(closeBracket); + stringBuilder.append(closeParentheses); return stringBuilder.toString(); } @@ -167,7 +167,7 @@ public static String getString(NamedNull namedNull) { * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(Predicate predicate) { - return predicate.getName() + openBracket + predicate.getArity() + closeBracket; + return predicate.getName() + openParentheses + predicate.getArity() + closeParentheses; } /** @@ -179,8 +179,8 @@ public static String getString(Predicate predicate) { * {@link DataSourceDeclaration}. 
*/ public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return "@source " + dataSourceDeclaration.getPredicate().getName() + openBracket - + dataSourceDeclaration.getPredicate().getArity() + closeBracket + ": " + return "@source " + dataSourceDeclaration.getPredicate().getName() + openParentheses + + dataSourceDeclaration.getPredicate().getArity() + closeParentheses + ": " + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } @@ -234,8 +234,7 @@ public static String getShortConstantName(DatatypeConstant datatypeConstant) { } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_FLOAT)) { + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { return datatypeConstant.getLexicalValue(); } else { return getConstantName(datatypeConstant); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 62039c855..88d01b2f7 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -25,25 +25,20 @@ import java.io.IOException; import java.net.URL; import java.util.Arrays; -import java.util.List; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; @@ -55,85 +50,166 @@ import org.semanticweb.vlog4j.parser.RuleParser; public class EntityTest { - final Variable x = Expressions.makeUniversalVariable("X"); - final Variable y = Expressions.makeUniversalVariable("Y"); - final Variable z = Expressions.makeExistentialVariable("Z"); + final Variable y2 = Expressions.makeUniversalVariable("Y"); final Constant d = Expressions.makeAbstractConstant("d"); - final Constant c = Expressions.makeAbstractConstant("c"); - final AbstractConstantImpl f = new AbstractConstantImpl("f"); + final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); - final DatatypeConstantImpl data = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); - final PositiveLiteral atom1 = 
Expressions.makePositiveLiteral("p", x, c); - final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); - final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); - final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); - final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y2, x); - final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); - final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", x, d); - final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, s); - final Predicate p = Expressions.makePredicate("p", 2); - final Fact f1 = Expressions.makeFact(p, Arrays.asList(f, s)); - final Fact f2 = Expressions.makeFact("p", Arrays.asList(data, d)); - final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, - NegativeLiteral, PositiveLiteral4); - final Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); - final Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); - final Conjunction bodyConjunction = new ConjunctionImpl<>(LiteralList); - final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); - final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); + + // final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", + // x, c); + // final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", + // y2, x); + // final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", + // x, d); + // final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", + // x, d); + // final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", + // c, s); + // final List LiteralList = Arrays.asList(positiveLiteral1, + // positiveLiteral2, positiveLiteral3, + // NegativeLiteral, PositiveLiteral4); + + // final Conjunction bodyConjunction = new + // ConjunctionImpl<>(LiteralList); + + // final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); @Test - public void factToStringRoundTripTest() throws ParsingException { - assertEquals(RuleParser.parseFact(f1.toString()), RuleParser.parseFact("p(f, \"Test\"@en).")); - assertEquals(RuleParser.parseFact(f2.toString()), - RuleParser.parseFact("p(\"data\"^^, d).")); + public void languageStringConstantToStringRoundTripTest() throws ParsingException { + LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); + Predicate p = Expressions.makePredicate("p", 1); + Fact f3 = Expressions.makeFact(p, Arrays.asList(s)); + assertEquals(f3, RuleParser.parseFact(f3.toString())); + } + + public void AbstractConstantToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl f = new AbstractConstantImpl("f"); + AbstractConstantImpl a = new AbstractConstantImpl("1"); + Predicate p = Expressions.makePredicate("p", 1); + Fact f1 = Expressions.makeFact(p, Arrays.asList(f)); + Fact f2 = Expressions.makeFact(p, Arrays.asList(a)); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + assertEquals(f2, RuleParser.parseFact(f2.toString())); } @Test public void ruleToStringRoundTripTest() throws ParsingException { - assertEquals(RuleParser.parseRule(rule1.toString()), RuleParser.parseRule("q(?X, !Z) :- p(?X, c), p(?X, ?Y).")); - assertEquals(RuleParser.parseRule(rule2.toString()), - RuleParser.parseRule("q(?X, !Z) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en).")); + 
Constant c = Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + Variable y = Expressions.makeUniversalVariable("Y"); + Variable z = Expressions.makeExistentialVariable("Z"); + PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); + PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); + PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); + Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); + Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); + Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + assertEquals(rule1, RuleParser.parseRule(rule1.toString())); + } + + @Test + public void ConjunctionToStringRoundTripTest() throws ParsingException { + Constant c = Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + Variable y = Expressions.makeUniversalVariable("Y"); + Variable z = Expressions.makeExistentialVariable("Z"); + NegativeLiteral atom1 = Expressions.makeNegativeLiteral("p", x, c); + PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); + PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); + Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); + Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); + Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + assertEquals(rule1, RuleParser.parseRule(rule1.toString())); } @Test - public void DatatypeConstantgRoundTripTest() throws ParsingException { - DatatypeConstantImpl datatypeConstantString = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); - DatatypeConstantImpl datatypeConstantInteger = new DatatypeConstantImpl("1", PrefixDeclarations.XSD_INTEGER); - DatatypeConstantImpl datatypeConstantFloat = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_FLOAT); - DatatypeConstantImpl datatypeConstantDouble = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_DOUBLE); - assertEquals("\"data\"", RuleParser.parseFact("p(\"data\"^^).") + public void LiteralToStringRoundTripTest() throws ParsingException { + Constant c = Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + Variable z = Expressions.makeExistentialVariable("Z"); + NegativeLiteral atom1 = Expressions.makeNegativeLiteral("p", x, c); + PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); + Rule rule1 = Expressions.makeRule(headAtom1, atom1); + assertEquals(rule1, RuleParser.parseRule(rule1.toString())); + } + + @Test + public void DatatypeDoubleConstantToStringRoundTripTest() throws ParsingException { + String shortDoubleConstant = "12.345E67"; + assertEquals(shortDoubleConstant, + RuleParser.parseFact("p(\"12.345E67\"^^).").getArguments() + .get(0).toString()); + assertEquals(shortDoubleConstant, RuleParser.parseFact("p(12.345E67).").getArguments().get(0).toString()); + } + + @Test + public void DatatypeFloatConstantToStringRoundTripTest() throws ParsingException { + String floatConstant = "\"0.5\"^^"; + assertEquals(floatConstant, RuleParser.parseFact("p(\"0.5\"^^).") .getArguments().get(0).toString()); - assertEquals("1", RuleParser.parseFact("p(\"1\"^^).").getArguments() - .get(0).toString()); - assertEquals("0.5", RuleParser.parseFact("p(\"0.5\"^^).").getArguments() - .get(0).toString()); - assertEquals("0.5", RuleParser.parseFact("p(\"0.5\"^^).") + } + + @Test + public void 
DatatypeStringConstantToStringRoundTripTest() throws ParsingException { + String shortStringConstant = "\"data\""; + assertEquals(shortStringConstant, RuleParser + .parseFact("p(\"data\"^^).").getArguments().get(0).toString()); + assertEquals(shortStringConstant, RuleParser.parseFact("p(\"data\").").getArguments().get(0).toString()); + } + + @Test + public void DatatypeIntegerConstantToStringRoundTripTest() throws ParsingException { + String shortIntegerConstant = "1"; + assertEquals(shortIntegerConstant, RuleParser.parseFact("p(\"1\"^^).") .getArguments().get(0).toString()); + assertEquals(shortIntegerConstant, RuleParser.parseFact("p(1).").getArguments().get(0).toString()); + } + @Test + public void DatatypeDecimalToStringRoundTripTest() throws ParsingException { + String shortDecimalConstant = "0.23"; + assertEquals(shortDecimalConstant, + RuleParser.parseFact("p(\"0.23\"^^).").getArguments().get(0) + .toString()); + assertEquals(shortDecimalConstant, RuleParser.parseFact("p(0.23).").getArguments().get(0).toString()); } @Test - public void dataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - final String INPUT_FOLDER = "src/test/data/input/"; - final String csvFile = INPUT_FOLDER + "file.csv"; - final File unzippedRdfFile = new File(INPUT_FOLDER + "file.nt"); Predicate predicate1 = Expressions.makePredicate("p", 3); - Predicate predicate2 = Expressions.makePredicate("q", 1); - final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource( - new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 ."); - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); - final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); - final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); - final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, - unzippedCsvFileDataSource); - final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate1, - unzippedRdfFileDataSource); + SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/sparql"), + "var", "?var wdt:P31 wd:Q5 ."); + DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); - RuleParser.parseInto(kb, dataSourceDeclaration2.toString()); - RuleParser.parseInto(kb, dataSourceDeclaration3.toString()); + assertEquals(dataSourceDeclaration1, kb.getDataSourceDeclarations().get(0)); + } + + @Test + public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + String INPUT_FOLDER = "src/test/data/input/"; + File unzippedRdfFile = new File(INPUT_FOLDER + "file.nt"); + Predicate predicate1 = Expressions.makePredicate("p", 3); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, + unzippedRdfFileDataSource); + RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); + } + + @Test + public void csvDataSourceDeclarationToStringParsingTest() throws 
ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + String INPUT_FOLDER = "src/test/data/input/"; + String csvFile = INPUT_FOLDER + "file.csv"; + Predicate predicate1 = Expressions.makePredicate("q", 1); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, + unzippedCsvFileDataSource); + RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 7896177e2..c5d4b7ecb 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -418,6 +418,7 @@ public void DatatypeConstantgRoundTripTest() throws ParsingException { DatatypeConstantImpl datatypeConstantInteger = new DatatypeConstantImpl("1", PrefixDeclarations.XSD_INTEGER); DatatypeConstantImpl datatypeConstantFloat = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_FLOAT); DatatypeConstantImpl datatypeConstantDouble = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_DOUBLE); + DatatypeConstantImpl datatypeConstantDecimal = new DatatypeConstantImpl("0.23", PrefixDeclarations.XSD_DECIMAL); assertEquals(datatypeConstantString, RuleParser.parseFact("p(\"data\"^^).").getArguments().get(0)); assertEquals(datatypeConstantInteger, @@ -426,6 +427,8 @@ public void DatatypeConstantgRoundTripTest() throws ParsingException { RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); assertEquals(datatypeConstantDouble, RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); + assertEquals(datatypeConstantDecimal, + RuleParser.parseFact("p(\"0.23\"^^).").getArguments().get(0)); } From 4613849a75bed0ca9905a44b998dc64f54b98924 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 15:22:52 +0100 Subject: [PATCH 0369/1003] added roundtrip tests --- .../vlog4j/syntax/parser/EntityTest.java | 24 ------------------- 1 file changed, 24 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 88d01b2f7..176277d5c 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -51,30 +51,6 @@ public class EntityTest { - final Variable y2 = Expressions.makeUniversalVariable("Y"); - final Constant d = Expressions.makeAbstractConstant("d"); - - final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); - - // final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", - // x, c); - // final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", - // y2, x); - // final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", - // x, d); - // final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", - // x, d); - // final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", - // c, s); - // final List LiteralList = Arrays.asList(positiveLiteral1, - // positiveLiteral2, positiveLiteral3, - // NegativeLiteral, PositiveLiteral4); - 
- // final Conjunction bodyConjunction = new - // ConjunctionImpl<>(LiteralList); - - // final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); - @Test public void languageStringConstantToStringRoundTripTest() throws ParsingException { LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); From 0289cb3e8932803dbe326e1fd0b2438982afe409 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 16:36:00 +0100 Subject: [PATCH 0370/1003] modified test methods names --- .../vlog4j/syntax/parser/EntityTest.java | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 176277d5c..b0460357f 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -25,7 +25,6 @@ import java.io.IOException; import java.net.URL; import java.util.Arrays; - import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; @@ -59,7 +58,7 @@ public void languageStringConstantToStringRoundTripTest() throws ParsingExceptio assertEquals(f3, RuleParser.parseFact(f3.toString())); } - public void AbstractConstantToStringRoundTripTest() throws ParsingException { + public void abstractConstantToStringRoundTripTest() throws ParsingException { AbstractConstantImpl f = new AbstractConstantImpl("f"); AbstractConstantImpl a = new AbstractConstantImpl("1"); Predicate p = Expressions.makePredicate("p", 1); @@ -85,7 +84,7 @@ public void ruleToStringRoundTripTest() throws ParsingException { } @Test - public void ConjunctionToStringRoundTripTest() throws ParsingException { + public void conjunctionToStringRoundTripTest() throws ParsingException { Constant c = Expressions.makeAbstractConstant("c"); Variable x = Expressions.makeUniversalVariable("X"); Variable y = Expressions.makeUniversalVariable("Y"); @@ -100,7 +99,7 @@ public void ConjunctionToStringRoundTripTest() throws ParsingException { } @Test - public void LiteralToStringRoundTripTest() throws ParsingException { + public void literalToStringRoundTripTest() throws ParsingException { Constant c = Expressions.makeAbstractConstant("c"); Variable x = Expressions.makeUniversalVariable("X"); Variable z = Expressions.makeExistentialVariable("Z"); @@ -111,7 +110,7 @@ public void LiteralToStringRoundTripTest() throws ParsingException { } @Test - public void DatatypeDoubleConstantToStringRoundTripTest() throws ParsingException { + public void datatypeDoubleConstantToStringRoundTripTest() throws ParsingException { String shortDoubleConstant = "12.345E67"; assertEquals(shortDoubleConstant, RuleParser.parseFact("p(\"12.345E67\"^^).").getArguments() @@ -120,14 +119,14 @@ public void DatatypeDoubleConstantToStringRoundTripTest() throws ParsingExceptio } @Test - public void DatatypeFloatConstantToStringRoundTripTest() throws ParsingException { + public void datatypeFloatConstantToStringRoundTripTest() throws ParsingException { String floatConstant = "\"0.5\"^^"; assertEquals(floatConstant, RuleParser.parseFact("p(\"0.5\"^^).") .getArguments().get(0).toString()); } @Test - public void DatatypeStringConstantToStringRoundTripTest() throws ParsingException { + public void datatypeStringConstantToStringRoundTripTest() throws ParsingException { String shortStringConstant = 
"\"data\""; assertEquals(shortStringConstant, RuleParser .parseFact("p(\"data\"^^).").getArguments().get(0).toString()); @@ -135,7 +134,7 @@ public void DatatypeStringConstantToStringRoundTripTest() throws ParsingExceptio } @Test - public void DatatypeIntegerConstantToStringRoundTripTest() throws ParsingException { + public void datatypeIntegerConstantToStringRoundTripTest() throws ParsingException { String shortIntegerConstant = "1"; assertEquals(shortIntegerConstant, RuleParser.parseFact("p(\"1\"^^).") .getArguments().get(0).toString()); @@ -143,7 +142,7 @@ public void DatatypeIntegerConstantToStringRoundTripTest() throws ParsingExcepti } @Test - public void DatatypeDecimalToStringRoundTripTest() throws ParsingException { + public void datatypeDecimalToStringRoundTripTest() throws ParsingException { String shortDecimalConstant = "0.23"; assertEquals(shortDecimalConstant, RuleParser.parseFact("p(\"0.23\"^^).").getArguments().get(0) From 0a6b92523d70bb2a507758cbf13f9a0443c82795 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 17:04:02 +0100 Subject: [PATCH 0371/1003] added getSyntacticRepresentation for InMemoryDataSource --- .../core/model/implementation/Serializer.java | 8 ++++---- .../implementation/InMemoryDataSource.java | 16 +++++++++++++--- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index f53ca7088..c489abfc1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -50,7 +50,7 @@ public final class Serializer { public static final String universalIdentifier = "?"; public static final String namedNullIdentifier = "_"; public static final String openParentheses = "("; - public static final String closeParentheses = ")"; + public static final String closingParentheses = ")"; public static final String ruleSeparator = ":-"; /** @@ -94,7 +94,7 @@ public static String getString(Literal literal) { } stringBuilder.append(term.getSyntacticRepresentation()); } - stringBuilder.append(closeParentheses); + stringBuilder.append(closingParentheses); return stringBuilder.toString(); } @@ -167,7 +167,7 @@ public static String getString(NamedNull namedNull) { * @return String representation corresponding to a given {@link Predicate}. 
*/ public static String getString(Predicate predicate) { - return predicate.getName() + openParentheses + predicate.getArity() + closeParentheses; + return predicate.getName() + openParentheses + predicate.getArity() + closingParentheses; } /** @@ -180,7 +180,7 @@ public static String getString(Predicate predicate) { */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { return "@source " + dataSourceDeclaration.getPredicate().getName() + openParentheses - + dataSourceDeclaration.getPredicate().getArity() + closeParentheses + ": " + + dataSourceDeclaration.getPredicate().getArity() + closingParentheses + ": " + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index c602b613a..ca59534dd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -96,13 +96,23 @@ public String[][] getData() { * Returns null to indicate that this {@link DataSource} cannot be passed to * VLog in a configuration string. */ + @Override - public String toConfigString() { - return null; + public String getSyntacticRepresentation() { + + String message = "\\\\ This data source holds facts: \n"; + StringBuilder facts = new StringBuilder(""); + facts.append(message); + for (int i = 0; i < this.getData().length; i++) { + for (int j = 0; j < data[i].length; j++) { + facts.append(data[i][j] + "\n"); + } + } + return facts.toString(); } @Override - public String getSyntacticRepresentation() { + public String toConfigString() { // TODO Auto-generated method stub return null; } From e8d859eb4c6c1f1d4a9e6ba9e622e8321619cc38 Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 20 Nov 2019 00:02:01 +0100 Subject: [PATCH 0372/1003] fixed grammer --- .../core/model/implementation/Serializer.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index c489abfc1..2b66ec484 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -49,8 +49,8 @@ public final class Serializer { public static final String existentialIdentifier = "!"; public static final String universalIdentifier = "?"; public static final String namedNullIdentifier = "_"; - public static final String openParentheses = "("; - public static final String closingParentheses = ")"; + public static final String openParenthesis = "("; + public static final String closingParenthesis = ")"; public static final String ruleSeparator = ":-"; /** @@ -84,7 +84,7 @@ public static String getString(Literal literal) { if (literal.isNegated()) { stringBuilder.append(negativeIdentifier); } - stringBuilder.append(literal.getPredicate().getName()).append(openParentheses); + stringBuilder.append(literal.getPredicate().getName()).append(openParenthesis); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { @@ -94,7 +94,7 @@ public static String getString(Literal literal) { } 
stringBuilder.append(term.getSyntacticRepresentation()); } - stringBuilder.append(closingParentheses); + stringBuilder.append(closingParenthesis); return stringBuilder.toString(); } @@ -167,7 +167,7 @@ public static String getString(NamedNull namedNull) { * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(Predicate predicate) { - return predicate.getName() + openParentheses + predicate.getArity() + closingParentheses; + return predicate.getName() + openParenthesis + predicate.getArity() + closingParenthesis; } /** @@ -179,8 +179,8 @@ public static String getString(Predicate predicate) { * {@link DataSourceDeclaration}. */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return "@source " + dataSourceDeclaration.getPredicate().getName() + openParentheses - + dataSourceDeclaration.getPredicate().getArity() + closingParentheses + ": " + return "@source " + dataSourceDeclaration.getPredicate().getName() + openParenthesis + + dataSourceDeclaration.getPredicate().getArity() + closingParenthesis + ": " + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } From aafbdcd87e865f0e7c15dddb843368dcf61a0a0a Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 21 Nov 2019 22:56:16 +0100 Subject: [PATCH 0373/1003] added strings identifiers --- .../core/model/implementation/Serializer.java | 63 +++++++++++-------- .../implementation/InMemoryDataSource.java | 16 ++--- .../reasoner/implementation/VLogReasoner.java | 3 +- 3 files changed, 46 insertions(+), 36 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 2b66ec484..92cfe77ff 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -43,15 +43,22 @@ * */ public final class Serializer { - public static final String negativeIdentifier = "~"; - public static final String comma = ","; - public static final String dot = "."; - public static final String existentialIdentifier = "!"; - public static final String universalIdentifier = "?"; - public static final String namedNullIdentifier = "_"; - public static final String openParenthesis = "("; - public static final String closingParenthesis = ")"; - public static final String ruleSeparator = ":-"; + public static final String NEGATIVE_IDENTIFIER = "~"; + public static final String COMMA = ", "; + public static final String DOT = "."; + public static final String EXISTENTIAL_IDENTIFIER = "!"; + public static final String UNIVERSAL_IDENTIFIER = "?"; + public static final String NAMEDNULL_IDENTIFIER = "_"; + public static final String OPEN_PARENTHESIS = "("; + public static final String CLOSING_PARENTHESIS = ")"; + public static final String RULE_SEPARATOR = " :- "; + public static final String AT = "@"; + public static final String SOURCE = "@source "; + public static final String COLON = ": "; + public static final String CARET = "^"; + public static final String LESS_THAN = "<"; + public static final String MORE_THAN = ">"; + public static final String ESCAPED_QUOTE = "\""; /** * Constructor. 
@@ -60,6 +67,10 @@ private Serializer() { } + private static String escape(String string) { + return string.replace("\\", "\\\\").replace("\"", "\\\""); + } + /** * Creates a String representation of a given {@link Rule}. * @@ -69,7 +80,7 @@ private Serializer() { * */ public static String getString(Rule rule) { - return getString(rule.getHead()) + " " + ruleSeparator + " " + getString(rule.getBody()) + dot; + return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + DOT; } /** @@ -82,19 +93,19 @@ public static String getString(Rule rule) { public static String getString(Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { - stringBuilder.append(negativeIdentifier); + stringBuilder.append(NEGATIVE_IDENTIFIER); } - stringBuilder.append(literal.getPredicate().getName()).append(openParenthesis); + stringBuilder.append(literal.getPredicate().getName()).append(OPEN_PARENTHESIS); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { first = false; } else { - stringBuilder.append(comma + " "); + stringBuilder.append(COMMA); } stringBuilder.append(term.getSyntacticRepresentation()); } - stringBuilder.append(closingParenthesis); + stringBuilder.append(CLOSING_PARENTHESIS); return stringBuilder.toString(); } @@ -106,7 +117,7 @@ public static String getString(Literal literal) { * @return String representation corresponding to a given {@link Fact}. */ public static String getFactString(Fact fact) { - return getString(fact) + dot; + return getString(fact) + DOT; } /** @@ -133,7 +144,7 @@ public static String getString(DatatypeConstant constant) { * {@link ExistentialVariable}. */ public static String getString(ExistentialVariable existentialVariable) { - return existentialIdentifier + existentialVariable.getName(); + return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); } /** @@ -145,7 +156,7 @@ public static String getString(ExistentialVariable existentialVariable) { * {@link UniversalVariable}. */ public static String getString(UniversalVariable universalVariable) { - return universalIdentifier + universalVariable.getName(); + return UNIVERSAL_IDENTIFIER + universalVariable.getName(); } /** @@ -156,7 +167,7 @@ public static String getString(UniversalVariable universalVariable) { * @return String representation corresponding to a given {@link NamedNull}. */ public static String getString(NamedNull namedNull) { - return namedNullIdentifier + namedNull.getName(); + return NAMEDNULL_IDENTIFIER + namedNull.getName(); } /** @@ -167,7 +178,7 @@ public static String getString(NamedNull namedNull) { * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(Predicate predicate) { - return predicate.getName() + openParenthesis + predicate.getArity() + closingParenthesis; + return predicate.getName() + OPEN_PARENTHESIS + predicate.getArity() + CLOSING_PARENTHESIS; } /** @@ -179,8 +190,8 @@ public static String getString(Predicate predicate) { * {@link DataSourceDeclaration}. 
*/ public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return "@source " + dataSourceDeclaration.getPredicate().getName() + openParenthesis - + dataSourceDeclaration.getPredicate().getArity() + closingParenthesis + ": " + return SOURCE + dataSourceDeclaration.getPredicate().getName() + OPEN_PARENTHESIS + + dataSourceDeclaration.getPredicate().getArity() + CLOSING_PARENTHESIS + COLON + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } @@ -198,7 +209,7 @@ public static String getString(Conjunction conjunction) { if (first) { first = false; } else { - stringBuilder.append(comma + " "); + stringBuilder.append(COMMA); } stringBuilder.append(getString(literal)); } @@ -215,7 +226,7 @@ public static String getString(Conjunction conjunction) { * {@link LanguageStringConstant}. */ public static String getConstantName(LanguageStringConstant languageStringConstant) { - return "\"" + languageStringConstant.getString().replace("\\", "\\\\").replace("\"", "\\\"") + "\"@" + return ESCAPED_QUOTE + escape(languageStringConstant.getString()) + ESCAPED_QUOTE + AT + languageStringConstant.getLanguageTag(); } @@ -230,7 +241,7 @@ public static String getConstantName(LanguageStringConstant languageStringConsta */ public static String getShortConstantName(DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return "\"" + datatypeConstant.getLexicalValue() + "\""; + return ESCAPED_QUOTE + datatypeConstant.getLexicalValue() + ESCAPED_QUOTE; } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) @@ -253,8 +264,8 @@ public static String getShortConstantName(DatatypeConstant datatypeConstant) { * {@link DatatypeConstant}. */ public static String getConstantName(DatatypeConstant datatypeConstant) { - return "\"" + datatypeConstant.getLexicalValue().replace("\\", "\\\\").replace("\"", "\\\"") + "\"^^<" - + datatypeConstant.getDatatype() + ">"; + return ESCAPED_QUOTE + escape(datatypeConstant.getLexicalValue()) + ESCAPED_QUOTE + CARET + CARET + LESS_THAN + + datatypeConstant.getDatatype() + MORE_THAN; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index ca59534dd..8c005a782 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -92,28 +92,28 @@ public String[][] getData() { } } - /** - * Returns null to indicate that this {@link DataSource} cannot be passed to - * VLog in a configuration string. - */ - @Override public String getSyntacticRepresentation() { - String message = "\\\\ This data source holds facts: \n"; + String message = "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"; StringBuilder facts = new StringBuilder(""); facts.append(message); for (int i = 0; i < this.getData().length; i++) { for (int j = 0; j < data[i].length; j++) { - facts.append(data[i][j] + "\n"); + facts.append(data[i][j] + " "); } + facts.append("\n"); } return facts.toString(); } + /** + * Returns null to indicate that this {@link DataSource} cannot be passed to + * VLog in a configuration string. 
+ */ + @Override public String toConfigString() { - // TODO Auto-generated method stub return null; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 6bda54351..b0c123859 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -11,7 +11,6 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; @@ -133,7 +132,7 @@ public boolean equals(Object obj) { @Override public String getSyntacticRepresentation() { - throw new NotImplementedException( + throw new UnsupportedOperationException( "This method is not implemented for type LocalFactsDataSourceDeclaration"); } From 3db65a2da653181efa1d1b1dde7032cfc8bbff45 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Mon, 25 Nov 2019 14:53:00 +0100 Subject: [PATCH 0374/1003] rework after review: - make getters and setters and isValid methods public - print message and exit instead of throwing a RuntimeException --- .../client/picocli/PrintQueryResults.java | 30 ++--- .../org/vlog4j/client/picocli/SaveModel.java | 32 ++--- .../client/picocli/SaveQueryResults.java | 59 ++++----- .../picocli/VLog4jClientMaterialize.java | 115 +++++++++--------- 4 files changed, 118 insertions(+), 118 deletions(-) diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java index cd32d547a..f80c226f2 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java @@ -30,7 +30,7 @@ */ public class PrintQueryResults { - static final String configurationErrorMessage = "Configuration Error: @code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. Set only one to true.\n Exiting the program."; + static final String configurationErrorMessage = "Configuration Error: @code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. Set only one to true."; /** * If true, Vlog4jClient will print the size of the query result. Mutually @@ -53,7 +53,7 @@ public class PrintQueryResults { public PrintQueryResults() { } - public PrintQueryResults(boolean sizeOnly, boolean complete) { + public PrintQueryResults(final boolean sizeOnly, final boolean complete) { this.sizeOnly = sizeOnly; this.complete = complete; } @@ -64,28 +64,28 @@ public PrintQueryResults(boolean sizeOnly, boolean complete) { * * @return @code{true} if configuration is valid. 
*/ - protected boolean isValid() { - return !sizeOnly || !complete; + public boolean isValid() { + return !this.sizeOnly || !this.complete; } - protected void printConfiguration() { - System.out.println(" --print-query-result-size: " + sizeOnly); - System.out.println(" --print-complete-query-result: " + complete); + public boolean isSizeOnly() { + return this.sizeOnly; } - protected boolean isSizeOnly() { - return sizeOnly; - } - - protected void setSizeOnly(boolean sizeOnly) { + public void setSizeOnly(final boolean sizeOnly) { this.sizeOnly = sizeOnly; } - protected boolean isComplete() { - return complete; + public boolean isComplete() { + return this.complete; } - protected void setComplete(boolean complete) { + public void setComplete(final boolean complete) { this.complete = complete; } + + void printConfiguration() { + System.out.println(" --print-query-result-size: " + this.sizeOnly); + System.out.println(" --print-complete-query-result: " + this.complete); + } } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java index b74a9b43a..6cb05f4b7 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java @@ -32,8 +32,8 @@ */ public class SaveModel { - static final String configurationErrorMessage = "Configuration Error: If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required.\nExiting the program."; - static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. Please check the path.\nExiting the program."; + static final String configurationErrorMessage = "Configuration Error: If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required."; + static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. Please check the path."; /** * If true, Vlog4jClient will save the model in {@code --output-model-directory} @@ -54,7 +54,7 @@ public class SaveModel { public SaveModel() { } - public SaveModel(boolean saveModel, String outputDir) { + public SaveModel(final boolean saveModel, final String outputDir) { this.saveModel = saveModel; this.outputModelDirectory = outputDir; } @@ -65,8 +65,8 @@ public SaveModel(boolean saveModel, String outputDir) { * * @return @code{true} if configuration is valid. */ - protected boolean isConfigurationValid() { - return !saveModel || (outputModelDirectory != null && !outputModelDirectory.isEmpty()); + public boolean isConfigurationValid() { + return !this.saveModel || ((this.outputModelDirectory != null) && !this.outputModelDirectory.isEmpty()); } /** @@ -74,17 +74,17 @@ protected boolean isConfigurationValid() { * * @return @code{true} if conditions are satisfied. 
*/ - protected boolean isDirectoryValid() { - File file = new File(outputModelDirectory); + public boolean isDirectoryValid() { + final File file = new File(this.outputModelDirectory); return !file.exists() || file.isDirectory(); } /** * Create directory to store the model */ - public void mkdir() { - if (saveModel) { - File file = new File(outputModelDirectory); + void mkdir() { + if (this.saveModel) { + final File file = new File(this.outputModelDirectory); if (!file.exists()) { file.mkdirs(); } @@ -92,23 +92,23 @@ public void mkdir() { } public void printConfiguration() { - System.out.println(" --save-model: " + saveModel); - System.out.println(" --output-model-directory: " + outputModelDirectory); + System.out.println(" --save-model: " + this.saveModel); + System.out.println(" --output-model-directory: " + this.outputModelDirectory); } public boolean isSaveModel() { - return saveModel; + return this.saveModel; } - public void setSaveModel(boolean saveModel) { + public void setSaveModel(final boolean saveModel) { this.saveModel = saveModel; } public String getOutputModelDirectory() { - return outputModelDirectory; + return this.outputModelDirectory; } - public void setOutputModelDirectory(String outputModelDirectory) { + public void setOutputModelDirectory(final String outputModelDirectory) { this.outputModelDirectory = outputModelDirectory; } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java index a0c20f91f..735b63a4b 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java @@ -32,8 +32,8 @@ */ public class SaveQueryResults { - static final String configurationErrorMessage = "Configuration Error: If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required.\nExiting the program."; - static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. Please check the path.\nExiting the program."; + static final String configurationErrorMessage = "Configuration Error: If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required."; + static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. Please check the path."; /** * If true, Vlog4jClient will save the query result in @@ -56,7 +56,7 @@ public class SaveQueryResults { public SaveQueryResults() { } - public SaveQueryResults(boolean saveResults, String outputDir) { + public SaveQueryResults(final boolean saveResults, final String outputDir) { this.saveResults = saveResults; this.outputQueryResultDirectory = outputDir; } @@ -67,8 +67,9 @@ public SaveQueryResults(boolean saveResults, String outputDir) { * * @return @code{true} if configuration is valid. */ - protected boolean isConfigurationValid() { - return !saveResults || (outputQueryResultDirectory != null && !outputQueryResultDirectory.isEmpty()); + public boolean isConfigurationValid() { + return !this.saveResults + || ((this.outputQueryResultDirectory != null) && !this.outputQueryResultDirectory.isEmpty()); } /** @@ -77,43 +78,43 @@ protected boolean isConfigurationValid() { * * @return @code{true} if conditions are satisfied. 
*/ - protected boolean isDirectoryValid() { - File file = new File(outputQueryResultDirectory); + public boolean isDirectoryValid() { + final File file = new File(this.outputQueryResultDirectory); return !file.exists() || file.isDirectory(); } - /** - * Create directory to store query results if not present. It assumes that - * configuration and directory are valid. - */ - protected void mkdir() { - if (saveResults) { - File file = new File(outputQueryResultDirectory); - if (!file.exists()) { - file.mkdirs(); - } - } + public boolean isSaveResults() { + return this.saveResults; } - protected void printConfiguration() { - System.out.println(" --save-query-results: " + saveResults); - System.out.println(" --output-query-result-directory: " + outputQueryResultDirectory); + public void setSaveResults(final boolean saveResults) { + this.saveResults = saveResults; } - protected boolean isSaveResults() { - return saveResults; + public String getOutputQueryResultDirectory() { + return this.outputQueryResultDirectory; } - protected void setSaveResults(boolean saveResults) { - this.saveResults = saveResults; + public void setOutputQueryResultDirectory(final String outputQueryResultDirectory) { + this.outputQueryResultDirectory = outputQueryResultDirectory; } - protected String getOutputQueryResultDirectory() { - return outputQueryResultDirectory; + /** + * Create directory to store query results if not present. It assumes that + * configuration and directory are valid. + */ + void mkdir() { + if (this.saveResults) { + final File file = new File(this.outputQueryResultDirectory); + if (!file.exists()) { + file.mkdirs(); + } + } } - protected void setOutputQueryResultDirectory(String outputQueryResultDirectory) { - this.outputQueryResultDirectory = outputQueryResultDirectory; + void printConfiguration() { + System.out.println(" --save-query-results: " + this.saveResults); + System.out.println(" --output-query-result-directory: " + this.outputQueryResultDirectory); } } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java index 75973312f..e2c0f790c 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -53,7 +53,7 @@ public class VLog4jClientMaterialize implements Runnable { private final List queries = new ArrayList<>(); @Option(names = "--rule-file", description = "Rule file(s) in {@link https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar} syntax", required = true) - private List ruleFiles = new ArrayList<>(); + private final List ruleFiles = new ArrayList<>(); // TODO // Support graal rule files @@ -61,25 +61,25 @@ public class VLog4jClientMaterialize implements Runnable { // private List graalRuleFiles = new ArrayList<>(); @Option(names = "--log-level", description = "Log level of VLog (c++ library). One of: DEBUG, INFO, WARNING (default), ERROR.", required = false) - private LogLevel logLevel = LogLevel.WARNING; + private final LogLevel logLevel = LogLevel.WARNING; @Option(names = "--log-file", description = "Log file of VLog (c++ library). VLog will log to the default system output by default", required = false) private String logFile; @Option(names = "--chase-algorithm", description = "Chase algorithm. 
RESTRICTED_CHASE (default) or SKOLEM_CHASE.", required = false) - private Algorithm chaseAlgorithm = Algorithm.RESTRICTED_CHASE; + private final Algorithm chaseAlgorithm = Algorithm.RESTRICTED_CHASE; @Option(names = "--timeout", description = "Timeout in seconds. Infinite by default", required = false) - private int timeout = 0; + private final int timeout = 0; @Option(names = "--query", description = "Positive not-ground Literals to query after materialization in rls syntax. Vlog4jClient will print the size of its extension", required = true) - private List queryStrings = new ArrayList<>(); + private final List queryStrings = new ArrayList<>(); @ArgGroup(exclusive = false) - private PrintQueryResults printQueryResults = new PrintQueryResults(); + private final PrintQueryResults printQueryResults = new PrintQueryResults(); @ArgGroup(exclusive = false) - private SaveQueryResults saveQueryResults = new SaveQueryResults(); + private final SaveQueryResults saveQueryResults = new SaveQueryResults(); // TODO // @ArgGroup(exclusive = false) @@ -101,7 +101,7 @@ public void run() { /* Print configuration */ this.printConfiguration(); - try (Reasoner reasoner = new VLogReasoner(kb)) { + try (Reasoner reasoner = new VLogReasoner(this.kb)) { this.materialize(reasoner); // TODO if (saveModel.saveModel) { this.saveModel(); } @@ -112,14 +112,14 @@ public void run() { } private void validateConfiguration() { - if (!printQueryResults.isValid()) { - printMessageAndExit(PrintQueryResults.configurationErrorMessage); + if (!this.printQueryResults.isValid()) { + this.printErrorMessageAndExit(PrintQueryResults.configurationErrorMessage); } - if (!saveQueryResults.isConfigurationValid()) { - printMessageAndExit(SaveQueryResults.configurationErrorMessage); + if (!this.saveQueryResults.isConfigurationValid()) { + this.printErrorMessageAndExit(SaveQueryResults.configurationErrorMessage); } - if (saveQueryResults.isSaveResults() && !saveQueryResults.isDirectoryValid()) { - printMessageAndExit(SaveQueryResults.wrongDirectoryErrorMessage); + if (this.saveQueryResults.isSaveResults() && !this.saveQueryResults.isDirectoryValid()) { + this.printErrorMessageAndExit(SaveQueryResults.wrongDirectoryErrorMessage); } // TODO // if (!saveModel.isConfigurationValid()) { @@ -131,25 +131,23 @@ private void validateConfiguration() { } private void configureRules() { - for (String ruleFile : ruleFiles) { + for (final String ruleFile : this.ruleFiles) { try { - RuleParser.parseInto(kb, new FileInputStream(ruleFile)); - } catch (FileNotFoundException e1) { - throw new RuntimeException( - "File not found: " + ruleFile + ". " + e1.getMessage() + "\nExiting the program."); - } catch (ParsingException e2) { - throw new RuntimeException( - "Failed to parse rule file: " + ruleFile + ". 
" + e2.getMessage() + "\nExiting the program."); + RuleParser.parseInto(this.kb, new FileInputStream(ruleFile)); + } catch (final FileNotFoundException e1) { + this.printErrorMessageAndExit("File not found: " + ruleFile + "\n " + e1.getMessage()); + } catch (final ParsingException e2) { + this.printErrorMessageAndExit("Failed to parse rule file: " + ruleFile + "\n " + e2.getMessage()); } } } private void configureQueries() { - for (String queryString : queryStrings) { + for (final String queryString : this.queryStrings) { try { final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); - queries.add(query); - } catch (ParsingException e) { + this.queries.add(query); + } catch (final ParsingException e) { System.err.println("Failed to parse query: \"\"\"" + queryString + "\"\"\"."); System.err.println(e.getMessage()); System.err.println("The query was skipped. Continuing ..."); @@ -157,43 +155,43 @@ private void configureQueries() { } } - private void materialize(Reasoner reasoner) { + private void materialize(final Reasoner reasoner) { // logFile - reasoner.setLogFile(logFile); + reasoner.setLogFile(this.logFile); // logLevel - reasoner.setLogLevel(logLevel); + reasoner.setLogLevel(this.logLevel); // chaseAlgorithm - reasoner.setAlgorithm(chaseAlgorithm); + reasoner.setAlgorithm(this.chaseAlgorithm); // timeout - if (timeout > 0) { - reasoner.setReasoningTimeout(timeout); + if (this.timeout > 0) { + reasoner.setReasoningTimeout(this.timeout); } System.out.println("Executing the chase ..."); try { reasoner.reason(); - } catch (IOException e) { - throw new RuntimeException( - "Something went wrong. Please check the log file." + e.getMessage() + "\nExiting the program."); + } catch (final IOException e) { + this.printErrorMessageAndExit( + "Something went wrong during reasoning. Please check the reasoner log file.\n" + e.getMessage()); } } // TODO private void saveModel() {...} - private void answerQueries(Reasoner reasoner) { - if (!queries.isEmpty()) { + private void answerQueries(final Reasoner reasoner) { + if (!this.queries.isEmpty()) { System.out.println("Answering queries ..."); - for (PositiveLiteral query : queries) { - if (saveQueryResults.isSaveResults()) { + for (final PositiveLiteral query : this.queries) { + if (this.saveQueryResults.isSaveResults()) { // Save the query results - doSaveQueryResults(reasoner, query); + this.doSaveQueryResults(reasoner, query); } - if (printQueryResults.isSizeOnly()) { + if (this.printQueryResults.isSizeOnly()) { // print number of facts in results - doPrintResults(reasoner, query); - } else if (printQueryResults.isComplete()) { + this.doPrintResults(reasoner, query); + } else if (this.printQueryResults.isComplete()) { // print facts ExamplesUtils.printOutQueryAnswers(query, reasoner); } @@ -204,46 +202,47 @@ private void answerQueries(Reasoner reasoner) { private void printConfiguration() { System.out.println("Configuration:"); - for (String ruleFile : ruleFiles) { + for (final String ruleFile : this.ruleFiles) { System.out.println(" --rule-file: " + ruleFile); } - for (PositiveLiteral query : queries) { + for (final PositiveLiteral query : this.queries) { System.out.println(" --query: " + query); } - System.out.println(" --log-file: " + logFile); - System.out.println(" --log-level: " + logLevel); - System.out.println(" --chase-algorithm: " + chaseAlgorithm); - System.out.println(" --timeout: " + ((timeout > 0) ? 
timeout : "none")); + System.out.println(" --log-file: " + this.logFile); + System.out.println(" --log-level: " + this.logLevel); + System.out.println(" --chase-algorithm: " + this.chaseAlgorithm); + System.out.println(" --timeout: " + ((this.timeout > 0) ? this.timeout : "none")); /* Print what to do with the result */ - printQueryResults.printConfiguration(); - saveQueryResults.printConfiguration(); + this.printQueryResults.printConfiguration(); + this.saveQueryResults.printConfiguration(); // TODO saveModel.printConfiguration(); } - private void doSaveQueryResults(Reasoner reasoner, PositiveLiteral query) { - saveQueryResults.mkdir(); + private void doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral query) { + this.saveQueryResults.mkdir(); try { - reasoner.exportQueryAnswersToCsv(query, queryOputputPath(query), true); - } catch (IOException e) { + reasoner.exportQueryAnswersToCsv(query, this.queryOputputPath(query), true); + } catch (final IOException e) { System.err.println("Can't save query: \"\"\"" + query + "\"\"\"."); System.err.println(e.getMessage()); } } - private void doPrintResults(Reasoner reasoner, PositiveLiteral query) { + private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { System.out.println( "Number of query answers in " + query + ": " + ExamplesUtils.getQueryAnswerCount(query, reasoner)); } - private String queryOputputPath(PositiveLiteral query) { - return saveQueryResults.getOutputQueryResultDirectory() + "/" + query + ".csv"; + private String queryOputputPath(final PositiveLiteral query) { + return this.saveQueryResults.getOutputQueryResultDirectory() + "/" + query + ".csv"; } - private void printMessageAndExit(String message) { + private void printErrorMessageAndExit(final String message) { System.err.println(message); + System.out.println("Exiting the program."); System.exit(1); } From 85b669255a098e4872e212cfd23f31fb5da81c9d Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 25 Nov 2019 16:50:24 +0100 Subject: [PATCH 0375/1003] extract default directory names into constants --- .../org/vlog4j/client/picocli/SaveModel.java | 7 +- .../client/picocli/SaveQueryResults.java | 6 +- .../client/picocli/PrintQueryResultsTest.java | 104 +++++++----------- .../vlog4j/client/picocli/SaveModelTest.java | 99 +++++++++-------- .../client/picocli/SaveQueryResultsTest.java | 67 ++--------- 5 files changed, 115 insertions(+), 168 deletions(-) diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java index 6cb05f4b7..fb03cf117 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java @@ -32,6 +32,8 @@ */ public class SaveModel { + public static final String DEFAULT_OUTPUT_DIR_NAME = "model"; + static final String configurationErrorMessage = "Configuration Error: If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required."; static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. Please check the path."; @@ -48,8 +50,9 @@ public class SaveModel { * * @default "model" */ - @Option(names = "--output-model-directory", description = "Directory to store the model. Used only if --store-model is true. \"model\" by default.") - private String outputModelDirectory = "model"; + @Option(names = "--output-model-directory", description = "Directory to store the model. 
Used only if --store-model is true. \"" + + DEFAULT_OUTPUT_DIR_NAME + "\" by default.") + private String outputModelDirectory = DEFAULT_OUTPUT_DIR_NAME; public SaveModel() { } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java index 735b63a4b..441aa359f 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java @@ -31,6 +31,7 @@ * */ public class SaveQueryResults { + public static final String DEFAULT_OUTPUT_DIR_NAME = "query-results"; static final String configurationErrorMessage = "Configuration Error: If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required."; static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. Please check the path."; @@ -50,8 +51,9 @@ public class SaveQueryResults { * * @default query-results */ - @Option(names = "--output-query-result-directory", description = "Directory to store the model. Used only if --save-query-results is true. \"query-results\" by default.") - private String outputQueryResultDirectory = "query-results"; + @Option(names = "--output-query-result-directory", description = "Directory to store the model. Used only if --save-query-results is true. \"" + + DEFAULT_OUTPUT_DIR_NAME + "\" by default.") + private String outputQueryResultDirectory = DEFAULT_OUTPUT_DIR_NAME; public SaveQueryResults() { } diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java index 686fe7506..13469c358 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java @@ -1,43 +1,22 @@ package org.vlog4j.client.picocli; -/*- - * #%L - * VLog4j Client - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import java.io.ByteArrayOutputStream; -import java.io.PrintStream; - import org.junit.Test; -import org.vlog4j.client.picocli.PrintQueryResults; public class PrintQueryResultsTest { - String outputConfigurationBase = " --print-query-result-size: %b\n --print-complete-query-result: %b\n"; - private final PrintQueryResults sizeTrueCompleteTrue = new PrintQueryResults(true, true); - private final PrintQueryResults sizeTrueCompleteFalse = new PrintQueryResults(true, false); - private final PrintQueryResults sizeFalseCompleteTrue = new PrintQueryResults(false, true); - private final PrintQueryResults sizeFalseCompleteFalse = new PrintQueryResults(false, false); + private static final PrintQueryResults sizeTrueCompleteTrue = new PrintQueryResults(); + private static final PrintQueryResults sizeTrueCompleteFalse = new PrintQueryResults(); + private static final PrintQueryResults sizeFalseCompleteTrue = new PrintQueryResults(false, true); + private static final PrintQueryResults sizeFalseCompleteFalse = new PrintQueryResults(); + static { + sizeTrueCompleteTrue.setComplete(true); + sizeFalseCompleteFalse.setSizeOnly(false); + } + @Test public void isValid_sizeTrueCompleteFalse_valid() { // default configuration @@ -45,13 +24,13 @@ public void isValid_sizeTrueCompleteFalse_valid() { } @Test - public void isValid_sizeFalseCompleteTrue_valid() { - assertTrue(sizeFalseCompleteTrue.isValid()); + public void isValid_sizeTrueCompleteTrue_notValid() { + assertFalse(sizeTrueCompleteTrue.isValid()); } @Test - public void isValid_sizeTrueCompleteTrue_notValid() { - assertFalse(sizeTrueCompleteTrue.isValid()); + public void isValid_sizeFalseCompleteTrue_valid() { + assertTrue(sizeFalseCompleteTrue.isValid()); } @Test @@ -60,27 +39,43 @@ public void isValid_sizeFalseCompleteFalse_valid() { } @Test - public void printConfiguration_sizeTrueCompleteFalse() { - assertEquals(String.format(outputConfigurationBase, true, false), - captureOutputPrintConfiguration(sizeTrueCompleteFalse)); + public void isSizeOnly_sizeFalseCompleteTrue() { + assertFalse(sizeFalseCompleteTrue.isSizeOnly()); + } + + @Test + public void isSizeOnly_sizeTrueCompleteTrue() { + assertTrue(sizeTrueCompleteTrue.isSizeOnly()); + } + + @Test + public void isSizeOnly_sizeTrueCompleteFalse() { + assertTrue(sizeTrueCompleteFalse.isSizeOnly()); + } + + @Test + public void isSizeOnly_sizeFalseCompleteFalse() { + assertFalse(sizeFalseCompleteFalse.isSizeOnly()); + } + + @Test + public void isComplete_sizeTrueCompleteFalse() { + assertFalse(sizeTrueCompleteFalse.isComplete()); } @Test - public void printConfiguration_sizeFalseCompleteTrue() { - assertEquals(String.format(outputConfigurationBase, false, true), - captureOutputPrintConfiguration(sizeFalseCompleteTrue)); + public void isComplete_sizeTrueCompleteTrue() { + assertTrue(sizeTrueCompleteTrue.isComplete()); } @Test - public void printConfiguration_sizeTrueCompleteTrue() { - assertEquals(String.format(outputConfigurationBase, true, true), - captureOutputPrintConfiguration(sizeTrueCompleteTrue)); + public void isComplete_sizeFalseCompleteTrue() { + assertTrue(sizeFalseCompleteTrue.isComplete()); } @Test - public void printConfiguration_sizeFalseCompleteFalse() { - assertEquals(String.format(outputConfigurationBase, false, false), - captureOutputPrintConfiguration(sizeFalseCompleteFalse)); + public void isComplete_sizeFalseCompleteFalse() { + 
assertFalse(sizeFalseCompleteFalse.isComplete()); } @Test @@ -101,21 +96,4 @@ public void setComplete_and_isComplete() { assertTrue(prq.isComplete()); } - private String captureOutputPrintConfiguration(PrintQueryResults prq) { - // Output Variables - ByteArrayOutputStream result = new ByteArrayOutputStream(); - PrintStream ps = new PrintStream(result); - // Save default System.out - PrintStream systemOut = System.out; - // Change System.out - System.setOut(ps); - // Do something - prq.printConfiguration(); - // Restore previous state - System.out.flush(); - System.setOut(systemOut); - // return result - return result.toString(); - } - } diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java index 53d80e267..5be484df7 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java @@ -5,11 +5,10 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; -import java.io.PrintStream; +import org.apache.commons.lang3.StringUtils; import org.junit.Rule; /*- @@ -34,18 +33,20 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; -import org.vlog4j.client.picocli.SaveModel; public class SaveModelTest { - private final String outputConfigurationBase = " --save-model: %b\n --output-model-directory: %s\n"; - private final String defaultDir = "model"; - private final SaveModel saveTrueDefaultDir = new SaveModel(true, defaultDir); - private final SaveModel saveTrueEmptyDir = new SaveModel(true, ""); - private final SaveModel saveTrueNullDir = new SaveModel(true, null); - private final SaveModel saveFalseDefaultDir = new SaveModel(false, defaultDir); - private final SaveModel saveFalseEmptyDir = new SaveModel(false, ""); - private final SaveModel saveFalseNullDir = new SaveModel(false, null); + private final static SaveModel saveTrueDefaultDir = new SaveModel(); + private final static SaveModel saveTrueEmptyDir = new SaveModel(true, ""); + private final static SaveModel saveTrueNullDir = new SaveModel(true, null); + private final static SaveModel saveFalseDefaultDir = new SaveModel(); + private final static SaveModel saveFalseEmptyDir = new SaveModel(false, ""); + private final static SaveModel saveFalseNullDir = new SaveModel(false, null); + + static { + saveTrueDefaultDir.setSaveModel(true); + saveFalseDefaultDir.setSaveModel(false); + } @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @@ -132,39 +133,63 @@ public void mkdir_saveFalse() throws IOException { } @Test - public void printConfiguration_saveTrueDefaultDir() { - assertEquals(String.format(outputConfigurationBase, true, defaultDir), - captureOutputPrintConfiguration(saveTrueDefaultDir)); + public void isSaveModel_saveTrueDefaultDir() { + assertTrue(saveTrueDefaultDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveTrueDefaultDir() { + assertEquals(SaveModel.DEFAULT_OUTPUT_DIR_NAME, saveTrueDefaultDir.getOutputModelDirectory()); + } + + @Test + public void isSaveModel_saveTrueEmptyDir() { + assertTrue(saveTrueEmptyDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveTrueEmptyDir() { + assertEquals(StringUtils.EMPTY, saveTrueEmptyDir.getOutputModelDirectory()); } @Test - public void printConfiguration_saveTrueEmptyDir() { - 
assertEquals(String.format(outputConfigurationBase, true, ""), - captureOutputPrintConfiguration(saveTrueEmptyDir)); + public void isSaveModel_saveTrueNullDir() { + assertTrue(saveTrueNullDir.isSaveModel()); } @Test - public void printConfiguration_saveTrueNullDir() { - assertEquals(String.format(outputConfigurationBase, true, null), - captureOutputPrintConfiguration(saveTrueNullDir)); + public void getOutputModelDirectory_saveTrueNullDir() { + assertNull(saveTrueNullDir.getOutputModelDirectory()); } @Test - public void printConfiguration_saveFalseDefaultDir() { - assertEquals(String.format(outputConfigurationBase, false, defaultDir), - captureOutputPrintConfiguration(saveFalseDefaultDir)); + public void isSaveModel_saveFalseDefaultDir() { + assertFalse(saveFalseDefaultDir.isSaveModel()); } @Test - public void printConfiguration_saveFalseEmptyDir() { - assertEquals(String.format(outputConfigurationBase, false, ""), - captureOutputPrintConfiguration(saveFalseEmptyDir)); + public void getOutputModelDirectory_saveFalseDefaultDir() { + assertEquals(SaveModel.DEFAULT_OUTPUT_DIR_NAME, saveFalseDefaultDir.getOutputModelDirectory()); } @Test - public void printConfiguration_saveFalseNullDir() { - assertEquals(String.format(outputConfigurationBase, false, null), - captureOutputPrintConfiguration(saveFalseNullDir)); + public void isSaveModel_saveFalseEmptyDir() { + assertFalse(saveFalseEmptyDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveFalseEmptyDir() { + assertEquals(StringUtils.EMPTY, saveFalseEmptyDir.getOutputModelDirectory()); + } + + @Test + public void isSaveModel_saveFalseNullDir() { + assertFalse(saveFalseNullDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveFalseNullDir() { + assertNull(saveFalseNullDir.getOutputModelDirectory()); } @Test @@ -185,20 +210,4 @@ public void setOutputModelDirectory_and_getOutputModelDirectory() { assertNull(sm.getOutputModelDirectory()); } - private String captureOutputPrintConfiguration(SaveModel sm) { - // Output Variables - ByteArrayOutputStream result = new ByteArrayOutputStream(); - PrintStream ps = new PrintStream(result); - // Save default System.out - PrintStream systemOut = System.out; - // Change System.out - System.setOut(ps); - // Do something - sm.printConfiguration(); - // Restore previous state - System.out.flush(); - System.setOut(systemOut); - // return result - return result.toString(); - } } diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java index 65988e564..b7f684f61 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java @@ -5,10 +5,8 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; -import java.io.PrintStream; import org.junit.Rule; @@ -36,15 +34,14 @@ import org.junit.rules.TemporaryFolder; public class SaveQueryResultsTest { - private final String outputConfigurationBase = " --save-query-results: %b\n --output-query-result-directory: %s\n"; - private final String defaultDir = "query-results"; - private final SaveQueryResults saveTrueDefaultDir = new SaveQueryResults(true, defaultDir); - private final SaveQueryResults saveTrueEmptyDir = new SaveQueryResults(true, ""); - private final SaveQueryResults 
saveTrueNullDir = new SaveQueryResults(true, null); - private final SaveQueryResults saveFalseDefaultDir = new SaveQueryResults(false, defaultDir); - private final SaveQueryResults saveFalseEmptyDir = new SaveQueryResults(false, ""); - private final SaveQueryResults saveFalseNullDir = new SaveQueryResults(false, null); + private static final SaveQueryResults saveTrueDefaultDir = new SaveQueryResults(true, + SaveQueryResults.DEFAULT_OUTPUT_DIR_NAME); + private static final SaveQueryResults saveTrueEmptyDir = new SaveQueryResults(true, ""); + private static final SaveQueryResults saveTrueNullDir = new SaveQueryResults(true, null); + private static final SaveQueryResults saveFalseDefaultDir = new SaveQueryResults(); + private static final SaveQueryResults saveFalseEmptyDir = new SaveQueryResults(false, ""); + private static final SaveQueryResults saveFalseNullDir = new SaveQueryResults(false, null); @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @@ -131,39 +128,13 @@ public void mkdir_saveFalse() throws IOException { } @Test - public void printConfiguration_saveTrueDefaultDir() { - assertEquals(String.format(outputConfigurationBase, true, defaultDir), - captureOutputPrintConfiguration(saveTrueDefaultDir)); + public void isSaveResultsl_saveFalseDefaultDir() { + assertFalse(saveFalseDefaultDir.isSaveResults()); } @Test - public void printConfiguration_saveTrueEmptyDir() { - assertEquals(String.format(outputConfigurationBase, true, ""), - captureOutputPrintConfiguration(saveTrueEmptyDir)); - } - - @Test - public void printConfiguration_saveTrueNullDir() { - assertEquals(String.format(outputConfigurationBase, true, null), - captureOutputPrintConfiguration(saveTrueNullDir)); - } - - @Test - public void printConfiguration_saveFalseDefaultDir() { - assertEquals(String.format(outputConfigurationBase, false, defaultDir), - captureOutputPrintConfiguration(saveFalseDefaultDir)); - } - - @Test - public void printConfiguration_saveFalseEmptyDir() { - assertEquals(String.format(outputConfigurationBase, false, ""), - captureOutputPrintConfiguration(saveFalseEmptyDir)); - } - - @Test - public void printConfiguration_saveFalseNullDir() { - assertEquals(String.format(outputConfigurationBase, false, null), - captureOutputPrintConfiguration(saveFalseNullDir)); + public void getOutputQueryResultDirectory_saveFalseDefaultDir() { + assertEquals(SaveQueryResults.DEFAULT_OUTPUT_DIR_NAME, saveFalseDefaultDir.getOutputQueryResultDirectory()); } @Test @@ -184,20 +155,4 @@ public void setOutputQueryResultDirectory_and_getOutputQueryResultsDirectory() { assertNull(srq.getOutputQueryResultDirectory()); } - private String captureOutputPrintConfiguration(SaveQueryResults srq) { - // Output Variables - ByteArrayOutputStream result = new ByteArrayOutputStream(); - PrintStream ps = new PrintStream(result); - // Save default System.out - PrintStream systemOut = System.out; - // Change System.out - System.setOut(ps); - // Do something - srq.printConfiguration(); - // Restore previous state - System.out.flush(); - System.setOut(systemOut); - // return result - return result.toString(); - } } From f658ca589959de7ba8f31aa3ada2a8916ddfc3ad Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 26 Nov 2019 20:36:06 +0100 Subject: [PATCH 0376/1003] added some fixes for abstractConstants --- .../core/model/implementation/Serializer.java | 31 +++++--- .../implementation/InMemoryDataSource.java | 6 +- .../vlog4j/syntax/parser/EntityTest.java | 75 +++++++++++-------- .../vlog4j/syntax/parser/RuleParserTest.java | 20 ----- 4 
files changed, 66 insertions(+), 66 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 92cfe77ff..13e187692 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -55,10 +55,16 @@ public final class Serializer { public static final String AT = "@"; public static final String SOURCE = "@source "; public static final String COLON = ": "; + public static final String COLON_UNSPACED = ":"; public static final String CARET = "^"; public static final String LESS_THAN = "<"; public static final String MORE_THAN = ">"; - public static final String ESCAPED_QUOTE = "\""; + public static final String QUOTE = "\""; + public static final String DOUBLE = "[-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?"; + public static final String INTEGER = "^[-+]?\\d+$"; + public static final String DECIMAL = "^(\\d*\\.)?\\d+$"; + public static final String TRUE = "true"; + public static final String FALSE = "false"; /** * Constructor. @@ -103,7 +109,8 @@ public static String getString(Literal literal) { } else { stringBuilder.append(COMMA); } - stringBuilder.append(term.getSyntacticRepresentation()); + String string = term.getSyntacticRepresentation(); + stringBuilder.append(string); } stringBuilder.append(CLOSING_PARENTHESIS); return stringBuilder.toString(); @@ -128,11 +135,13 @@ public static String getFactString(Fact fact) { * @return String representation corresponding to a given {@link Constant}. */ public static String getString(Constant constant) { - return constant.getName(); - } - - public static String getString(DatatypeConstant constant) { - return getShortConstantName(constant); + if (constant.getName().contains(COLON_UNSPACED) || constant.getName().matches(INTEGER) + || constant.getName().matches(DOUBLE) || constant.getName().matches(DECIMAL) + || constant.getName().equals(TRUE) || constant.getName().equals(FALSE)) { + return LESS_THAN + constant.getName() + MORE_THAN; + } else { + return constant.getName(); + } } /** @@ -226,7 +235,7 @@ public static String getString(Conjunction conjunction) { * {@link LanguageStringConstant}. */ public static String getConstantName(LanguageStringConstant languageStringConstant) { - return ESCAPED_QUOTE + escape(languageStringConstant.getString()) + ESCAPED_QUOTE + AT + return QUOTE + escape(languageStringConstant.getString()) + QUOTE + AT + languageStringConstant.getLanguageTag(); } @@ -239,9 +248,9 @@ public static String getConstantName(LanguageStringConstant languageStringConsta * @return String representation corresponding to a given * {@link DatatypeConstant}. */ - public static String getShortConstantName(DatatypeConstant datatypeConstant) { + public static String getString(DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return ESCAPED_QUOTE + datatypeConstant.getLexicalValue() + ESCAPED_QUOTE; + return QUOTE + datatypeConstant.getLexicalValue() + QUOTE; } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) @@ -264,7 +273,7 @@ public static String getShortConstantName(DatatypeConstant datatypeConstant) { * {@link DatatypeConstant}. 
*/ public static String getConstantName(DatatypeConstant datatypeConstant) { - return ESCAPED_QUOTE + escape(datatypeConstant.getLexicalValue()) + ESCAPED_QUOTE + CARET + CARET + LESS_THAN + return QUOTE + escape(datatypeConstant.getLexicalValue()) + QUOTE + CARET + CARET + LESS_THAN + datatypeConstant.getDatatype() + MORE_THAN; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index 8c005a782..3bd708525 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -94,10 +94,8 @@ public String[][] getData() { @Override public String getSyntacticRepresentation() { - - String message = "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"; - StringBuilder facts = new StringBuilder(""); - facts.append(message); + StringBuilder facts = new StringBuilder( + "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); for (int i = 0; i < this.getData().length; i++) { for (int j = 0; j < data[i].length; j++) { facts.append(data[i][j] + " "); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index b0460357f..a79b7b853 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -24,7 +24,7 @@ import java.io.File; import java.io.IOException; import java.net.URL; -import java.util.Arrays; + import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; @@ -54,18 +54,23 @@ public class EntityTest { public void languageStringConstantToStringRoundTripTest() throws ParsingException { LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); Predicate p = Expressions.makePredicate("p", 1); - Fact f3 = Expressions.makeFact(p, Arrays.asList(s)); + Fact f3 = Expressions.makeFact(p, s); assertEquals(f3, RuleParser.parseFact(f3.toString())); } - public void abstractConstantToStringRoundTripTest() throws ParsingException { + @Test + public void abstractConstantStringToStringRoundTripTest() throws ParsingException { AbstractConstantImpl f = new AbstractConstantImpl("f"); - AbstractConstantImpl a = new AbstractConstantImpl("1"); - Predicate p = Expressions.makePredicate("p", 1); - Fact f1 = Expressions.makeFact(p, Arrays.asList(f)); - Fact f2 = Expressions.makeFact(p, Arrays.asList(a)); + Fact f1 = Expressions.makeFact("p", f); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantAbsoluteToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("http://example.org/test"); + Fact f1 = Expressions.makeFact("p", a); + System.out.println(f1.toString()); assertEquals(f1, RuleParser.parseFact(f1.toString())); - assertEquals(f2, RuleParser.parseFact(f2.toString())); } @Test @@ -95,27 +100,34 @@ public void conjunctionToStringRoundTripTest() throws ParsingException { Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); Conjunction headPositiveLiterals = 
Expressions.makePositiveConjunction(headAtom1); Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); - assertEquals(rule1, RuleParser.parseRule(rule1.toString())); + assertEquals(bodyLiterals, RuleParser.parseRule(rule1.toString()).getBody()); + assertEquals(headPositiveLiterals, RuleParser.parseRule(rule1.toString()).getHead()); + } + + @Test + public void positiveLiteralToStringRoundTripTest() throws ParsingException { + Constant c = Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); + assertEquals(atom1, RuleParser.parseLiteral(atom1.toString())); } @Test public void literalToStringRoundTripTest() throws ParsingException { Constant c = Expressions.makeAbstractConstant("c"); Variable x = Expressions.makeUniversalVariable("X"); - Variable z = Expressions.makeExistentialVariable("Z"); NegativeLiteral atom1 = Expressions.makeNegativeLiteral("p", x, c); - PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); - Rule rule1 = Expressions.makeRule(headAtom1, atom1); - assertEquals(rule1, RuleParser.parseRule(rule1.toString())); + assertEquals(atom1, RuleParser.parseLiteral(atom1.toString())); } @Test public void datatypeDoubleConstantToStringRoundTripTest() throws ParsingException { String shortDoubleConstant = "12.345E67"; assertEquals(shortDoubleConstant, - RuleParser.parseFact("p(\"12.345E67\"^^).").getArguments() - .get(0).toString()); - assertEquals(shortDoubleConstant, RuleParser.parseFact("p(12.345E67).").getArguments().get(0).toString()); + RuleParser.parseFact("p(\"" + shortDoubleConstant + "\"^^).") + .getArguments().get(0).toString()); + assertEquals(shortDoubleConstant, + RuleParser.parseFact("p(" + shortDoubleConstant + ").").getArguments().get(0).toString()); } @Test @@ -128,26 +140,31 @@ public void datatypeFloatConstantToStringRoundTripTest() throws ParsingException @Test public void datatypeStringConstantToStringRoundTripTest() throws ParsingException { String shortStringConstant = "\"data\""; - assertEquals(shortStringConstant, RuleParser - .parseFact("p(\"data\"^^).").getArguments().get(0).toString()); - assertEquals(shortStringConstant, RuleParser.parseFact("p(\"data\").").getArguments().get(0).toString()); + assertEquals(shortStringConstant, + RuleParser.parseFact("p(" + shortStringConstant + "^^).") + .getArguments().get(0).toString()); + assertEquals(shortStringConstant, + RuleParser.parseFact("p(" + shortStringConstant + ").").getArguments().get(0).toString()); } @Test public void datatypeIntegerConstantToStringRoundTripTest() throws ParsingException { String shortIntegerConstant = "1"; - assertEquals(shortIntegerConstant, RuleParser.parseFact("p(\"1\"^^).") - .getArguments().get(0).toString()); - assertEquals(shortIntegerConstant, RuleParser.parseFact("p(1).").getArguments().get(0).toString()); + assertEquals(shortIntegerConstant, + RuleParser.parseFact("p(\"" + shortIntegerConstant + "\"^^).") + .getArguments().get(0).toString()); + assertEquals(shortIntegerConstant, + RuleParser.parseFact("p(" + shortIntegerConstant + ").").getArguments().get(0).toString()); } @Test public void datatypeDecimalToStringRoundTripTest() throws ParsingException { String shortDecimalConstant = "0.23"; assertEquals(shortDecimalConstant, - RuleParser.parseFact("p(\"0.23\"^^).").getArguments().get(0) - .toString()); - assertEquals(shortDecimalConstant, RuleParser.parseFact("p(0.23).").getArguments().get(0).toString()); + RuleParser.parseFact("p(\"" + 
shortDecimalConstant + "\"^^).") + .getArguments().get(0).toString()); + assertEquals(shortDecimalConstant, + RuleParser.parseFact("p(" + shortDecimalConstant + ").").getArguments().get(0).toString()); } @Test @@ -164,10 +181,8 @@ public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingExcep @Test public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - String INPUT_FOLDER = "src/test/data/input/"; - File unzippedRdfFile = new File(INPUT_FOLDER + "file.nt"); Predicate predicate1 = Expressions.makePredicate("p", 3); - RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File("src/test/data/input/file.nt")); DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedRdfFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); @@ -177,10 +192,8 @@ public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingExceptio @Test public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - String INPUT_FOLDER = "src/test/data/input/"; - String csvFile = INPUT_FOLDER + "file.csv"; Predicate predicate1 = Expressions.makePredicate("q", 1); - CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File("src/test/data/input/file.csv")); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedCsvFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index c5d4b7ecb..4e8a4fd3f 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -412,24 +412,4 @@ public void testBlankPredicateName() throws ParsingException { RuleParser.parse(input); } - @Test - public void DatatypeConstantgRoundTripTest() throws ParsingException { - DatatypeConstantImpl datatypeConstantString = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); - DatatypeConstantImpl datatypeConstantInteger = new DatatypeConstantImpl("1", PrefixDeclarations.XSD_INTEGER); - DatatypeConstantImpl datatypeConstantFloat = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_FLOAT); - DatatypeConstantImpl datatypeConstantDouble = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_DOUBLE); - DatatypeConstantImpl datatypeConstantDecimal = new DatatypeConstantImpl("0.23", PrefixDeclarations.XSD_DECIMAL); - assertEquals(datatypeConstantString, - RuleParser.parseFact("p(\"data\"^^).").getArguments().get(0)); - assertEquals(datatypeConstantInteger, - RuleParser.parseFact("p(\"1\"^^).").getArguments().get(0)); - assertEquals(datatypeConstantFloat, - RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); - assertEquals(datatypeConstantDouble, - RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); - assertEquals(datatypeConstantDecimal, - RuleParser.parseFact("p(\"0.23\"^^).").getArguments().get(0)); - - } - } From 195fd7ce863c3d53205b2238c7df41a2d831724a Mon Sep 17 00:00:00 2001 From: alloka Date: 
Wed, 27 Nov 2019 12:37:16 +0100 Subject: [PATCH 0377/1003] added roundtripping for absolute abstractConstants --- .../vlog4j/syntax/parser/EntityTest.java | 29 ++++++++++++++++++- .../vlog4j/syntax/parser/RuleParserTest.java | 4 +-- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index a79b7b853..9bb9e68ef 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -69,7 +69,34 @@ public void abstractConstantStringToStringRoundTripTest() throws ParsingExceptio public void abstractConstantAbsoluteToStringRoundTripTest() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("http://example.org/test"); Fact f1 = Expressions.makeFact("p", a); - System.out.println(f1.toString()); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeDoubleToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("4.2E9"); + Fact f1 = Expressions.makeFact("p", b); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeIntegerToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("11"); + Fact f1 = Expressions.makeFact("p", b); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeBooleanToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("false"); + Fact f1 = Expressions.makeFact("p", b); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeDecimalToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("-5.0"); + Fact f1 = Expressions.makeFact("p", b); assertEquals(f1, RuleParser.parseFact(f1.toString())); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 4e8a4fd3f..8b0693a54 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -19,8 +19,7 @@ * limitations under the License. 
* #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import java.io.File; import java.io.IOException; @@ -41,7 +40,6 @@ import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; From b4801bd03718342541d9e9abf27942800230da4b Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 29 Nov 2019 18:15:41 +0100 Subject: [PATCH 0378/1003] added predicate relative/absolute support --- .../model/api/LanguageStringConstant.java | 2 +- .../model/implementation/Expressions.java | 12 ++++ .../core/model/implementation/Serializer.java | 26 +++++--- .../vlog4j/syntax/parser/EntityTest.java | 63 +++++++++++++++++++ 4 files changed, 92 insertions(+), 11 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java index b3694d565..2c150b507 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java @@ -65,6 +65,6 @@ default String getDatatype() { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return Serializer.getConstantName(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java index eedc94f8c..625b0c4c6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java @@ -46,7 +46,19 @@ * @author Markus Krötzsch * */ + public final class Expressions { + + public static final String COLON_UNSPACED = ":"; + public static final String LESS_THAN = "<"; + public static final String MORE_THAN = ">"; + public static final String QUOTE = "\""; + public static final String DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; + public static final String INTEGER = "^[-+]?\\d+$"; + public static final String DECIMAL = "^(\\d*\\.)?\\d+$"; + public static final String TRUE = "true"; + public static final String FALSE = "false"; + /** * Private constructor prevents this utilities class to be instantiated. 
*/ diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 13e187692..59485b205 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.implementation; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; + /*- * #%L * VLog4j Core Components @@ -60,7 +62,7 @@ public final class Serializer { public static final String LESS_THAN = "<"; public static final String MORE_THAN = ">"; public static final String QUOTE = "\""; - public static final String DOUBLE = "[-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?"; + public static final String DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; public static final String INTEGER = "^[-+]?\\d+$"; public static final String DECIMAL = "^(\\d*\\.)?\\d+$"; public static final String TRUE = "true"; @@ -77,6 +79,16 @@ private static String escape(String string) { return string.replace("\\", "\\\\").replace("\"", "\\\""); } + private static String checkRelativeAbsoluteIri(String string) { + if ((string.contains(COLON_UNSPACED) || string.matches(INTEGER) || string.matches(DOUBLE) + || string.matches(DECIMAL) || string.equals(TRUE) || string.equals(FALSE)) + && (!string.contains(LESS_THAN))) { + return LESS_THAN + string + MORE_THAN; + } else { + return string; + } + } + /** * Creates a String representation of a given {@link Rule}. * @@ -101,7 +113,7 @@ public static String getString(Literal literal) { if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); } - stringBuilder.append(literal.getPredicate().getName()).append(OPEN_PARENTHESIS); + stringBuilder.append(checkRelativeAbsoluteIri(literal.getPredicate().getName())).append(OPEN_PARENTHESIS); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { @@ -134,14 +146,8 @@ public static String getFactString(Fact fact) { * @param constant a {@link Constant} * @return String representation corresponding to a given {@link Constant}. 
*/ - public static String getString(Constant constant) { - if (constant.getName().contains(COLON_UNSPACED) || constant.getName().matches(INTEGER) - || constant.getName().matches(DOUBLE) || constant.getName().matches(DECIMAL) - || constant.getName().equals(TRUE) || constant.getName().equals(FALSE)) { - return LESS_THAN + constant.getName() + MORE_THAN; - } else { - return constant.getName(); - } + public static String getString(AbstractConstant constant) { + return checkRelativeAbsoluteIri(constant.getName()); } /** diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 9bb9e68ef..cff88cb13 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; @@ -100,6 +101,68 @@ public void abstractConstantRelativeDecimalToStringRoundTripTest() throws Parsin assertEquals(f1, RuleParser.parseFact(f1.toString())); } + @Test + public void iriRoundTripTest() throws ParsingException { + String abstractConstant = "<1.0>"; + Fact f2 = RuleParser.parseFact("p(" + abstractConstant + ")."); + assertEquals(abstractConstant, f2.getArguments().get(0).toString()); + } + + @Test + public void iriRoundTripTest2() throws ParsingException { + String abstractConstant = ""; + Fact f2 = RuleParser.parseFact("p(" + abstractConstant + ")."); + assertEquals(abstractConstant, f2.getArguments().get(0).toString()); + } + + @Test + public void iriRoundTripTest3() throws ParsingException { + String abstractConstant = ""; + Fact f2 = RuleParser.parseFact("p(" + abstractConstant + ")."); + assertEquals(abstractConstant, f2.getArguments().get(0).toString()); + } + + @Test + public void predicateIriRoundTripTest() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = Expressions.makeFact("1.e1", a); + assertEquals(f, RuleParser.parseFact(f.toString())); + } + + @Test + public void predicateIriRoundTripTest2() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("<1.e1>(a)."); + Fact f2 = Expressions.makeFact("1.e1", a); + assertEquals(f, f2); + } + + @Test + public void predicateIriRoundTripTest3() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("(a)."); + Fact f2 = Expressions.makeFact("a:b", a); + assertEquals(f, f2); + } + + @Test + public void predicateRoundTripTest3() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f2 = Expressions.makeFact("a:1", a); + assertEquals(f2, RuleParser.parseFact(f2.toString())); + } + + @Test + public void iriAngularBracketsTest() throws ParsingException { + String constant = "a"; + Fact fact = RuleParser.parseFact("p(" + constant + ")"); + Term abstractConst = fact.getArguments().get(0); + assertEquals(constant, abstractConst.toString()); + Fact fact2 = RuleParser.parseFact("p(<" + constant + 
">)"); + Term abstractConst2 = fact2.getArguments().get(0); + assertEquals(abstractConst, abstractConst2); + } + @Test public void ruleToStringRoundTripTest() throws ParsingException { Constant c = Expressions.makeAbstractConstant("c"); From e2ec110a78b874733290d722c510657dd0b18921 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Mon, 2 Dec 2019 12:07:29 +0100 Subject: [PATCH 0379/1003] added license header --- .../client/picocli/PrintQueryResultsTest.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java index 13469c358..b949a5ff3 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java @@ -1,5 +1,25 @@ package org.vlog4j.client.picocli; +/*- + * #%L + * VLog4j Client + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; From 9030082f4b27123df4a3c26fa6b136c760a5c1c0 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Mon, 2 Dec 2019 12:19:12 +0100 Subject: [PATCH 0380/1003] eliminate client dependency to examples, duplicate code in ClientUtils --- vlog4j-client/pom.xml | 8 +- .../vlog4j/client/picocli/ClientUtils.java | 122 ++++++++++++++++++ .../picocli/VLog4jClientMaterialize.java | 7 +- 3 files changed, 129 insertions(+), 8 deletions(-) create mode 100644 vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java diff --git a/vlog4j-client/pom.xml b/vlog4j-client/pom.xml index 267b14896..fbee05c25 100644 --- a/vlog4j-client/pom.xml +++ b/vlog4j-client/pom.xml @@ -31,10 +31,10 @@ vlog4j-parser ${project.version} - - ${project.groupId} - vlog4j-examples - ${project.version} + + org.slf4j + slf4j-log4j12 + ${slf4j.version} info.picocli diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java new file mode 100644 index 000000000..5893f86c2 --- /dev/null +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java @@ -0,0 +1,122 @@ +package org.vlog4j.client.picocli; + +/*- + * #%L + * VLog4j Client + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Iterator; + +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; + +/** + * Utility class for interacting with the vlog4j client. + * + * @author dragoste + * + */ +public final class ClientUtils { + + /** + * Private constructor. This is a utility class. Therefore, it is best practice + * to do the following: (1) Make the class final, (2) make its constructor + * private, (3) make all its fields and methods static. This prevents the + * classes instantiation and inheritance. + */ + private ClientUtils() { + + } + + /** + * Defines how messages should be logged. This method can be modified to + * restrict the logging messages that are shown on the console or to change + * their formatting. See the documentation of Log4J for details on how to do + * this. + * + * Note: The VLog C++ backend performs its own logging. The log-level for this + * can be configured using + * {@link Reasoner#setLogLevel(org.semanticweb.vlog4j.core.reasoner.LogLevel)}. + * It is also possible to specify a separate log file for this part of the logs. + */ + public static void configureLogging() { + // Create the appender that will write log messages to the console. + final ConsoleAppender consoleAppender = new ConsoleAppender(); + // Define the pattern of log messages. + // Insert the string "%c{1}:%L" to also show class name and line. + final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; + consoleAppender.setLayout(new PatternLayout(pattern)); + // Change to Level.ERROR for fewer messages: + consoleAppender.setThreshold(Level.INFO); + + consoleAppender.activateOptions(); + Logger.getRootLogger().addAppender(consoleAppender); + } + + /** + * Prints out the answers given by {@code reasoner} to the query + * ({@code queryAtom}). + * + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + */ + public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) { + System.out.println("Answers to query " + queryAtom + " :"); + try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { + answers.forEachRemaining(answer -> System.out.println(" - " + answer)); + + System.out.println("Query answers are: " + answers.getCorrectness()); + } + System.out.println(); + } + + /** + * Returns the number of answers returned by {@code reasoner} to the query + * ({@code queryAtom}). + * + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + */ + public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Reasoner reasoner) { + try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { + return iteratorSize(answers); + } + } + + /** + * Returns the size of an iterator. + * + * @FIXME This is an inefficient way of counting results. 
It should be done at a + * lower level instead + * @param Iterator to iterate over + * @return number of elements in iterator + */ + private static int iteratorSize(final Iterator iterator) { + int size = 0; + for (; iterator.hasNext(); ++size) { + iterator.next(); + } + return size; + } + +} diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java index e2c0f790c..84f713662 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -32,7 +32,6 @@ import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -87,7 +86,7 @@ public class VLog4jClientMaterialize implements Runnable { @Override public void run() { - ExamplesUtils.configureLogging(); + ClientUtils.configureLogging(); /* Validate configuration */ this.validateConfiguration(); @@ -193,7 +192,7 @@ private void answerQueries(final Reasoner reasoner) { this.doPrintResults(reasoner, query); } else if (this.printQueryResults.isComplete()) { // print facts - ExamplesUtils.printOutQueryAnswers(query, reasoner); + ClientUtils.printOutQueryAnswers(query, reasoner); } } } @@ -233,7 +232,7 @@ private void doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral q private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { System.out.println( - "Number of query answers in " + query + ": " + ExamplesUtils.getQueryAnswerCount(query, reasoner)); + "Number of query answers in " + query + ": " + ClientUtils.getQueryAnswerCount(query, reasoner)); } private String queryOputputPath(final PositiveLiteral query) { From bb2ee938d55c9ffdb5dc90942d1cd3d9ccd02eb1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 2 Dec 2019 13:43:19 +0100 Subject: [PATCH 0381/1003] remove addons --- .travis.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index f50b6c2fa..ac5033743 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,13 +4,13 @@ jdk: # - oraclejdk8 # - oraclejdk9 -addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - gcc-5 - - g++-5 +##addons: + ##apt: + ##sources: + #- ubuntu-toolchain-r-test +## packages: +# - gcc-5 +# - g++-5 ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar #before_install: From 5ebbd0d592ea1dbf4d35e2d14da5028a7d251637 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Mon, 2 Dec 2019 13:43:37 +0100 Subject: [PATCH 0382/1003] remove gcc configurations --- build-vlog-library.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/build-vlog-library.sh b/build-vlog-library.sh index 599ed109f..7008b2fdd 100644 --- a/build-vlog-library.sh +++ b/build-vlog-library.sh @@ -6,10 +6,7 @@ then echo "Using cached VLog JAR." else echo "Building new VLog JAR." 
- if [ "$(which gcc-5)x" != "x" ]; then - export CC=gcc-5 && export CXX=g++-5 - fi - mkdir -p local_builds + mkdir -p local_builds rm -rf build-vlog mkdir build-vlog cd build-vlog From dc3cea51f43d9fd277473ac785457a126cdcc3c1 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 13:57:53 +0100 Subject: [PATCH 0383/1003] added some changes --- .../model/implementation/Expressions.java | 10 ---------- .../vlog4j/syntax/parser/EntityTest.java | 19 +------------------ .../vlog4j/syntax/parser/RuleParserTest.java | 18 ++++++++++++++++++ 3 files changed, 19 insertions(+), 28 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java index 625b0c4c6..2ffbfcf28 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java @@ -49,16 +49,6 @@ public final class Expressions { - public static final String COLON_UNSPACED = ":"; - public static final String LESS_THAN = "<"; - public static final String MORE_THAN = ">"; - public static final String QUOTE = "\""; - public static final String DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; - public static final String INTEGER = "^[-+]?\\d+$"; - public static final String DECIMAL = "^(\\d*\\.)?\\d+$"; - public static final String TRUE = "true"; - public static final String FALSE = "false"; - /** * Private constructor prevents this utilities class to be instantiated. */ diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index cff88cb13..8bd7cb008 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -129,22 +129,6 @@ public void predicateIriRoundTripTest() throws ParsingException { assertEquals(f, RuleParser.parseFact(f.toString())); } - @Test - public void predicateIriRoundTripTest2() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("<1.e1>(a)."); - Fact f2 = Expressions.makeFact("1.e1", a); - assertEquals(f, f2); - } - - @Test - public void predicateIriRoundTripTest3() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("(a)."); - Fact f2 = Expressions.makeFact("a:b", a); - assertEquals(f, f2); - } - @Test public void predicateRoundTripTest3() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("a"); @@ -223,8 +207,7 @@ public void datatypeDoubleConstantToStringRoundTripTest() throws ParsingExceptio @Test public void datatypeFloatConstantToStringRoundTripTest() throws ParsingException { String floatConstant = "\"0.5\"^^"; - assertEquals(floatConstant, RuleParser.parseFact("p(\"0.5\"^^).") - .getArguments().get(0).toString()); + assertEquals(floatConstant, RuleParser.parseFact("p(" + floatConstant + ").").getArguments().get(0).toString()); } @Test diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 8b0693a54..66a5b4748 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java 
+++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -32,6 +32,7 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -39,6 +40,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; @@ -410,4 +412,20 @@ public void testBlankPredicateName() throws ParsingException { RuleParser.parse(input); } + + @Test + public void predicateIriRoundTripTest() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("<1.e1>(a)."); + Fact f2 = Expressions.makeFact("1.e1", a); + assertEquals(f, f2); + } + + @Test + public void predicateIriRoundTripTest2() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("<a:b>(a)."); + Fact f2 = Expressions.makeFact("a:b", a); + assertEquals(f, f2); + } } From 945e5920360cb005c579a834ee261bce14eacb39 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 2 Dec 2019 14:00:13 +0100 Subject: [PATCH 0384/1003] remove commented addons lines.
--- .travis.yml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index ac5033743..24e616797 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,14 +4,6 @@ jdk: # - oraclejdk8 # - oraclejdk9 -##addons: - ##apt: - ##sources: - #- ubuntu-toolchain-r-test -## packages: -# - gcc-5 -# - g++-5 - ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar #before_install: # - sudo apt-get install gcc-5 -y From 90c21ee72d427a69f11fe438678b88f0557c2991 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 15:00:13 +0100 Subject: [PATCH 0385/1003] added some changes --- .../core/model/implementation/Serializer.java | 3 +-- .../vlog4j/syntax/parser/RuleParserTest.java | 19 ------------------- 2 files changed, 1 insertion(+), 21 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 59485b205..ad3d345b2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -81,8 +81,7 @@ private static String escape(String string) { private static String checkRelativeAbsoluteIri(String string) { if ((string.contains(COLON_UNSPACED) || string.matches(INTEGER) || string.matches(DOUBLE) - || string.matches(DECIMAL) || string.equals(TRUE) || string.equals(FALSE)) - && (!string.contains(LESS_THAN))) { + || string.matches(DECIMAL) || string.equals(TRUE) || string.equals(FALSE))) { return LESS_THAN + string + MORE_THAN; } else { return string; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 66a5b4748..2be0725a1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -32,7 +32,6 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -40,7 +39,6 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; @@ -411,21 +409,4 @@ public void testBlankPredicateName() throws ParsingException { String input = "_:(a) ."; RuleParser.parse(input); } - - @Test - public void predicateIriRoundTripTest() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("<1.e1>(a)."); - Fact f2 = Expressions.makeFact("1.e1", a); - assertEquals(f, f2); - } - - @Test - public void predicateIriRoundTripTest2() throws ParsingException { - AbstractConstantImpl a = new 
AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("(a)."); - Fact f2 = Expressions.makeFact("a:b", a); - assertEquals(f, f2); - } - } From 9350e231851404a7e8dc7e6d566c6527634f78e0 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 15:28:05 +0100 Subject: [PATCH 0386/1003] added some changes --- .../vlog4j/syntax/parser/RuleParserTest.java | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 2be0725a1..bdf9ac788 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -32,6 +32,7 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -39,6 +40,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; @@ -409,4 +411,20 @@ public void testBlankPredicateName() throws ParsingException { String input = "_:(a) ."; RuleParser.parse(input); } + + @Test + public void predicateNormalIriEqualityTest() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("<1.e1>(a)."); + Fact f2 = Expressions.makeFact("1.e1", a); + assertEquals(f, f2); + } + + @Test + public void predicateNormalIriEqualityTest2() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("(a)."); + Fact f2 = Expressions.makeFact("a:b", a); + assertEquals(f, f2); + } } From 6f7d52b3fb802d2c168b36416dde75764e4458b3 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 16:12:04 +0100 Subject: [PATCH 0387/1003] fixed merge problems --- .../reasoner/implementation/VLogReasoner.java | 1605 +++++++++-------- 1 file changed, 812 insertions(+), 793 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b630a6970..5376e0896 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,793 +1,812 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import java.io.IOException; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Formatter; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.lang3.Validate; -import 
org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; -import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import karmaresearch.vlog.AlreadyStartedException; -import karmaresearch.vlog.EDBConfigurationException; -import karmaresearch.vlog.MaterializationException; -import karmaresearch.vlog.NonExistingPredicateException; -import karmaresearch.vlog.NotStartedException; -import karmaresearch.vlog.TermQueryResultIterator; -import karmaresearch.vlog.VLog; -import karmaresearch.vlog.VLog.CyclicCheckResult; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * Reasoner implementation using the VLog backend. - * - * - * - * @author Markus Kroetzsch - * - */ -public class VLogReasoner implements Reasoner { - private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); - - /** - * Dummy data source declaration for predicates for which we have explicit local - * facts in the input. 
- * - * @author Markus Kroetzsch - * - */ - class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { - - final Predicate predicate; - - public LocalFactsDataSourceDeclaration(Predicate predicate) { - this.predicate = predicate; - } - - @Override - public T accept(StatementVisitor statementVisitor) { - return statementVisitor.visit(this); - } - - @Override - public Predicate getPredicate() { - return this.predicate; - } - - @Override - public DataSource getDataSource() { - return null; - } - - @Override - public int hashCode() { - return predicate.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; - return predicate.equals(other.predicate); - } - - @Override - public String getSyntacticRepresentation() { - - throw new UnsupportedOperationException( - "This method is not implemented for type LocalFactsDataSourceDeclaration"); - } - - } - - /** - * Local visitor implementation for processing statements upon loading. Internal - * index structures are updated based on the statements that are detected. - * - * @author Markus Kroetzsch - * - */ - class LoadKbVisitor implements StatementVisitor { - - public void clearIndexes() { - edbPredicates.clear(); - idbPredicates.clear(); - aliasedEdbPredicates.clear(); - aliasesForEdbPredicates.clear(); - directEdbFacts.clear(); - rules.clear(); - } - - @Override - public Void visit(Fact statement) { - final Predicate predicate = statement.getPredicate(); - registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); - if (!directEdbFacts.containsKey(predicate)) { - final List facts = new ArrayList(); - facts.add(statement); - directEdbFacts.put(predicate, facts); - } else { - directEdbFacts.get(predicate).add(statement); - } - return null; - } - - @Override - public Void visit(Rule statement) { - rules.add(statement); - for (final PositiveLiteral positiveLiteral : statement.getHead()) { - final Predicate predicate = positiveLiteral.getPredicate(); - if (!idbPredicates.contains(predicate)) { - if (edbPredicates.containsKey(predicate)) { - addEdbAlias(edbPredicates.get(predicate)); - edbPredicates.remove(predicate); - } - idbPredicates.add(predicate); - } - } - return null; - } - - @Override - public Void visit(DataSourceDeclaration statement) { - registerEdbDeclaration(statement); - return null; - } - - void registerEdbDeclaration(DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - if (idbPredicates.contains(predicate) || aliasedEdbPredicates.contains(predicate)) { - if (!aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { - addEdbAlias(dataSourceDeclaration); - } - } else { - final DataSourceDeclaration currentMainDeclaration = edbPredicates.get(predicate); - if (currentMainDeclaration == null) { - edbPredicates.put(predicate, dataSourceDeclaration); - } else if (!(currentMainDeclaration.equals(dataSourceDeclaration))) { - addEdbAlias(currentMainDeclaration); - addEdbAlias(dataSourceDeclaration); - edbPredicates.remove(predicate); - } // else: predicate already known to have local facts (only) - } - } - - void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - Predicate aliasPredicate; - if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { 
- aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); - } else { - aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), - predicate.getArity()); - } - aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); - aliasedEdbPredicates.add(predicate); - - final List terms = new ArrayList<>(); - for (int i = 1; i <= predicate.getArity(); i++) { - terms.add(new UniversalVariableImpl("X" + i)); - } - final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); - final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); - final Rule rule = new RuleImpl(new ConjunctionImpl(Arrays.asList(head)), - new ConjunctionImpl(Arrays.asList(body))); - rules.add(rule); - } - - } - - final KnowledgeBase knowledgeBase; - final VLog vLog = new VLog(); - - final Map aliasesForEdbPredicates = new HashMap<>(); - final Set idbPredicates = new HashSet<>(); - final Map edbPredicates = new HashMap<>(); - final Set aliasedEdbPredicates = new HashSet<>(); - final Map> directEdbFacts = new HashMap<>(); - final Set rules = new HashSet<>(); - - private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; - private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; - - private LogLevel internalLogLevel = LogLevel.WARNING; - private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; - private Integer timeoutAfterSeconds; - private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; - - /** - * Holds the state of the reasoning result. Has value {@code true} if reasoning - * has completed, {@code false} if it has been interrupted. - */ - private boolean reasoningCompleted; - - public VLogReasoner(KnowledgeBase knowledgeBase) { - super(); - this.knowledgeBase = knowledgeBase; - this.knowledgeBase.addListener(this); - - setLogLevel(this.internalLogLevel); - } - - @Override - public KnowledgeBase getKnowledgeBase() { - return this.knowledgeBase; - } - - @Override - public void setAlgorithm(final Algorithm algorithm) { - Validate.notNull(algorithm, "Algorithm cannot be null!"); - validateNotClosed(); - this.algorithm = algorithm; - } - - @Override - public Algorithm getAlgorithm() { - return this.algorithm; - } - - @Override - public void setReasoningTimeout(Integer seconds) { - validateNotClosed(); - if (seconds != null) { - Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); - } - this.timeoutAfterSeconds = seconds; - } - - @Override - public Integer getReasoningTimeout() { - return this.timeoutAfterSeconds; - } - - @Override - public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { - validateNotClosed(); - Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); - this.ruleRewriteStrategy = ruleRewritingStrategy; - } - - @Override - public RuleRewriteStrategy getRuleRewriteStrategy() { - return this.ruleRewriteStrategy; - } - - /* - * TODO Due to automatic predicate renaming, it can happen that an EDB predicate - * cannot be queried after loading unless reasoning has already been invoked - * (since the auxiliary rule that imports the EDB facts to the "real" predicate - * must be used). 
This issue could be weakened by rewriting queries to - * (single-source) EDB predicates internally when in such a state, - */ - // @Override - void load() throws IOException { - validateNotClosed(); - - switch (this.reasonerState) { - case KB_NOT_LOADED: - loadKnowledgeBase(); - break; - case KB_LOADED: - case MATERIALISED: - // do nothing, all KB is already loaded - break; - case KB_CHANGED: - resetReasoner(); - loadKnowledgeBase(); - default: - break; - } - } - - void loadKnowledgeBase() throws IOException { - LOGGER.info("Started loading knowledge base ..."); - final LoadKbVisitor visitor = new LoadKbVisitor(); - visitor.clearIndexes(); - for (final Statement statement : knowledgeBase) { - statement.accept(visitor); - } - - if (edbPredicates.isEmpty() && aliasedEdbPredicates.isEmpty()) { - LOGGER.warn("No facts have been provided."); - } - - try { - this.vLog.start(getDataSourcesConfigurationString(), false); - } catch (final AlreadyStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); - } - loadInMemoryDataSources(); - - validateDataSourcePredicateArities(); - - loadFacts(); - loadRules(); - - this.reasonerState = ReasonerState.KB_LOADED; - - // if there are no rules, then materialisation state is complete - this.correctness = rules.isEmpty() ? Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; - - LOGGER.info("Finished loading knowledge base."); - } - - String getDataSourcesConfigurationString() { - final StringBuilder configStringBuilder = new StringBuilder(); - final Formatter formatter = new Formatter(configStringBuilder); - int dataSourceIndex = 0; - for (final Predicate predicate : this.edbPredicates.keySet()) { - final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); - dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), predicate, - dataSourceIndex, formatter); - } - for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { - final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); - dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), aliasPredicate, - dataSourceIndex, formatter); - } - formatter.close(); - return configStringBuilder.toString(); - } - - int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, int dataSourceIndex, - Formatter formatter) { - if (dataSource != null) { - final String configString = dataSource.toConfigString(); - if (configString != null) { - formatter.format(dataSource.toConfigString(), dataSourceIndex, - ModelToVLogConverter.toVLogPredicate(predicate)); - return dataSourceIndex + 1; - } - } - return dataSourceIndex; - } - - /** - * Checks if the loaded external data sources do in fact contain data of the - * correct arity. 
- * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) - */ - void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { - for (final Predicate predicate : edbPredicates.keySet()) { - validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); - } - for (final DataSourceDeclaration dataSourceDeclaration : aliasesForEdbPredicates.keySet()) { - validateDataSourcePredicateArity(aliasesForEdbPredicates.get(dataSourceDeclaration), - dataSourceDeclaration.getDataSource()); - } - } - - void loadInMemoryDataSources() { - for (final Predicate predicate : this.edbPredicates.keySet()) { - final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); - loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), predicate); - } - for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { - final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); - loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), aliasPredicate); - } - } - - void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { - final InMemoryDataSource inMemoryDataSource; - if (dataSource instanceof InMemoryDataSource) { - inMemoryDataSource = (InMemoryDataSource) dataSource; - } else { - return; - } - try { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); - this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); - if (LOGGER.isDebugEnabled()) { - for (final String[] tuple : inMemoryDataSource.getData()) { - LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); - } - } - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); - } - } - - /** - * Checks if the loaded external data for a given source does in fact contain - * data of the correct arity for the given predidate. - * - * @param predicate the predicate for which data is loaded - * @param dataSource the data source used - * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) - */ - void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) - throws IncompatiblePredicateArityException { - if (dataSource == null) - return; - try { - final int dataSourcePredicateArity = this.vLog - .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); - if (dataSourcePredicateArity == -1) { - LOGGER.warn("Data source {} for predicate {} is empty! 
", dataSource, predicate); - } else if (predicate.getArity() != dataSourcePredicateArity) { - throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } - } - - void loadFacts() { - for (final Predicate predicate : directEdbFacts.keySet()) { - Predicate aliasPredicate; - if (edbPredicates.containsKey(predicate)) { - aliasPredicate = predicate; - } else { - aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); - } - try { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); - final String[][] vLogPredicateTuples = ModelToVLogConverter - .toVLogFactTuples(directEdbFacts.get(predicate)); - this.vLog.addData(vLogPredicateName, vLogPredicateTuples); - if (LOGGER.isDebugEnabled()) { - for (final String[] tuple : vLogPredicateTuples) { - LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); - } - } - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); - } - } - } - - void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); - final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter - .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); - try { - this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); - if (LOGGER.isDebugEnabled()) { - for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { - LOGGER.debug("Loaded rule {}.", rule.toString()); - } - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } - } - - @Override - public boolean reason() throws IOException { - validateNotClosed(); - - switch (this.reasonerState) { - case KB_NOT_LOADED: - load(); - runChase(); - break; - case KB_LOADED: - runChase(); - break; - case KB_CHANGED: - resetReasoner(); - load(); - runChase(); - break; - case MATERIALISED: - runChase(); - break; - default: - break; - } - - return this.reasoningCompleted; - } - - private void runChase() { - LOGGER.info("Started materialisation of inferences ..."); - this.reasonerState = ReasonerState.MATERIALISED; - - final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; - try { - if (this.timeoutAfterSeconds == null) { - this.vLog.materialize(skolemChase); - this.reasoningCompleted = true; - } else { - this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final MaterializationException e) { - // FIXME: the message generated here is not guaranteed to be the correct - // interpretation of the exception that is caught - throw new RuntimeException( - "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", - e); - } - - if (this.reasoningCompleted) { - this.correctness = Correctness.SOUND_AND_COMPLETE; - LOGGER.info("Completed materialisation of inferences."); - } else { - this.correctness = Correctness.SOUND_BUT_INCOMPLETE; - LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); - } - } - - @Override - public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) { - validateNotClosed(); - if 
(this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); - - final boolean filterBlanks = !includeBlanks; - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - - TermQueryResultIterator stringQueryResultIterator; - try { - stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty!"); - return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); - } - - logWarningOnCorrectness(); - return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); - } - - @Override - public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, - final boolean includeBlanks) throws IOException { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); - Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); - Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); - - final boolean filterBlanks = !includeBlanks; - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - try { - this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } catch (final NonExistingPredicateException e1) { - throw new IllegalArgumentException(MessageFormat.format( - "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); - } - - logWarningOnCorrectness(); - return this.correctness; - } - - private void logWarningOnCorrectness() { - if (this.correctness != Correctness.SOUND_AND_COMPLETE) { - LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); - } - } - - @Override - public void resetReasoner() { - validateNotClosed(); - this.reasonerState = ReasonerState.KB_NOT_LOADED; - this.vLog.stop(); - LOGGER.info("Reasoner has been reset. 
All inferences computed during reasoning have been discarded."); - } - - @Override - public void close() { - if (this.reasonerState == ReasonerState.CLOSED) { - LOGGER.info("Reasoner is already closed."); - } else { - this.reasonerState = ReasonerState.CLOSED; - this.knowledgeBase.deleteListener(this); - this.vLog.stop(); - LOGGER.info("Reasoner closed."); - } - } - - @Override - public void setLogLevel(LogLevel logLevel) { - validateNotClosed(); - Validate.notNull(logLevel, "Log level cannot be null!"); - this.internalLogLevel = logLevel; - this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); - } - - @Override - public LogLevel getLogLevel() { - return this.internalLogLevel; - } - - @Override - public void setLogFile(String filePath) { - validateNotClosed(); - this.vLog.setLogFile(filePath); - } - - @Override - public boolean isJA() { - return checkAcyclicity(AcyclicityNotion.JA); - } - - @Override - public boolean isRJA() { - return checkAcyclicity(AcyclicityNotion.RJA); - } - - @Override - public boolean isMFA() { - return checkAcyclicity(AcyclicityNotion.MFA); - } - - @Override - public boolean isRMFA() { - return checkAcyclicity(AcyclicityNotion.RMFA); - } - - @Override - public boolean isMFC() { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, - "Checking rules acyclicity is not allowed before loading!"); - } - - CyclicCheckResult checkCyclic; - try { - checkCyclic = this.vLog.checkCyclic("MFC"); - } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible - } - return checkCyclic.equals(CyclicCheckResult.CYCLIC); - } - - private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - try { - load(); - } catch (IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 - throw new RuntimeException(e); - } - } - - CyclicCheckResult checkCyclic; - try { - checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); - } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible - } - return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); - } - - @Override - public CyclicityResult checkForCycles() { - final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); - if (acyclic) { - return CyclicityResult.ACYCLIC; - } else { - final boolean cyclic = isMFC(); - if (cyclic) { - return CyclicityResult.CYCLIC; - } - return CyclicityResult.UNDETERMINED; - } - } - - @Override - public void onStatementsAdded(List statementsAdded) { - // TODO more elaborate materialisation state handling - - updateReasonerToKnowledgeBaseChanged(); - - // updateCorrectnessOnStatementsAdded(statementsAdded); - updateCorrectness(); - } - - @Override - public void onStatementAdded(Statement statementAdded) { - // TODO more elaborate materialisation state handling - - updateReasonerToKnowledgeBaseChanged(); - - // updateCorrectnessOnStatementAdded(statementAdded); - updateCorrectness(); - } - - private void updateReasonerToKnowledgeBaseChanged() { - if (this.reasonerState.equals(ReasonerState.KB_LOADED) - || this.reasonerState.equals(ReasonerState.MATERIALISED)) { - - this.reasonerState = ReasonerState.KB_CHANGED; - } - } - - private void updateCorrectness() { - if (this.reasonerState == ReasonerState.KB_CHANGED) { - - final boolean noRules = 
this.knowledgeBase.getRules().isEmpty(); - this.correctness = noRules ? Correctness.SOUND_BUT_INCOMPLETE : Correctness.INCORRECT; - } - } - - /** - * Check if reasoner is closed and throw an exception if it is. - * - * @throws ReasonerStateException - */ - void validateNotClosed() throws ReasonerStateException { - if (this.reasonerState == ReasonerState.CLOSED) { - LOGGER.error("Invalid operation requested on a closed reasoner object!"); - throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); - } - } - -} +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import java.io.IOException; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; +import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.Correctness; +import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.ReasonerState; +import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import karmaresearch.vlog.AlreadyStartedException; +import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.MaterializationException; +import karmaresearch.vlog.NonExistingPredicateException; +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.TermQueryResultIterator; +import karmaresearch.vlog.VLog; +import karmaresearch.vlog.VLog.CyclicCheckResult; + +/* + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Reasoner implementation using the VLog backend. + * + * + * + * @author Markus Kroetzsch + * + */ +public class VLogReasoner implements Reasoner { + private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); + + /** + * Dummy data source declaration for predicates for which we have explicit local + * facts in the input. + * + * @author Markus Kroetzsch + * + */ + class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { + + final Predicate predicate; + + public LocalFactsDataSourceDeclaration(Predicate predicate) { + this.predicate = predicate; + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public DataSource getDataSource() { + return null; + } + + @Override + public int hashCode() { + return predicate.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; + return predicate.equals(other.predicate); + } + + } + + /** + * Local visitor implementation for processing statements upon loading. Internal + * index structures are updated based on the statements that are detected. 
+ * + * @author Markus Kroetzsch + * + */ + class LoadKbVisitor implements StatementVisitor { + + public void clearIndexes() { + edbPredicates.clear(); + idbPredicates.clear(); + aliasedEdbPredicates.clear(); + aliasesForEdbPredicates.clear(); + directEdbFacts.clear(); + rules.clear(); + } + + @Override + public Void visit(Fact statement) { + final Predicate predicate = statement.getPredicate(); + registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); + if (!directEdbFacts.containsKey(predicate)) { + final List facts = new ArrayList(); + facts.add(statement); + directEdbFacts.put(predicate, facts); + } else { + directEdbFacts.get(predicate).add(statement); + } + return null; + } + + @Override + public Void visit(Rule statement) { + rules.add(statement); + for (final PositiveLiteral positiveLiteral : statement.getHead()) { + final Predicate predicate = positiveLiteral.getPredicate(); + if (!idbPredicates.contains(predicate)) { + if (edbPredicates.containsKey(predicate)) { + addEdbAlias(edbPredicates.get(predicate)); + edbPredicates.remove(predicate); + } + idbPredicates.add(predicate); + } + } + return null; + } + + @Override + public Void visit(DataSourceDeclaration statement) { + registerEdbDeclaration(statement); + return null; + } + + void registerEdbDeclaration(DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + if (idbPredicates.contains(predicate) || aliasedEdbPredicates.contains(predicate)) { + if (!aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { + addEdbAlias(dataSourceDeclaration); + } + } else { + final DataSourceDeclaration currentMainDeclaration = edbPredicates.get(predicate); + if (currentMainDeclaration == null) { + edbPredicates.put(predicate, dataSourceDeclaration); + } else if (!(currentMainDeclaration.equals(dataSourceDeclaration))) { + addEdbAlias(currentMainDeclaration); + addEdbAlias(dataSourceDeclaration); + edbPredicates.remove(predicate); + } // else: predicate already known to have local facts (only) + } + } + + void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + Predicate aliasPredicate; + if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { + aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); + } else { + aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), + predicate.getArity()); + } + aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); + aliasedEdbPredicates.add(predicate); + + final List terms = new ArrayList<>(); + for (int i = 1; i <= predicate.getArity(); i++) { + terms.add(new UniversalVariableImpl("X" + i)); + } + final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); + final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); + final Rule rule = new RuleImpl(new ConjunctionImpl(Arrays.asList(head)), + new ConjunctionImpl(Arrays.asList(body))); + rules.add(rule); + } + + } + + final KnowledgeBase knowledgeBase; + final VLog vLog = new VLog(); + + final Map aliasesForEdbPredicates = new HashMap<>(); + final Set idbPredicates = new HashSet<>(); + final Map edbPredicates = new HashMap<>(); + final Set aliasedEdbPredicates = new HashSet<>(); + final Map> directEdbFacts = new HashMap<>(); + final Set rules = new HashSet<>(); + + private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; + private Correctness correctness = 
Correctness.SOUND_BUT_INCOMPLETE; + + private LogLevel internalLogLevel = LogLevel.WARNING; + private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; + private Integer timeoutAfterSeconds; + private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; + + /** + * Holds the state of the reasoning result. Has value {@code true} if reasoning + * has completed, {@code false} if it has been interrupted. + */ + private boolean reasoningCompleted; + + public VLogReasoner(KnowledgeBase knowledgeBase) { + super(); + this.knowledgeBase = knowledgeBase; + this.knowledgeBase.addListener(this); + + setLogLevel(this.internalLogLevel); + } + + @Override + public KnowledgeBase getKnowledgeBase() { + return this.knowledgeBase; + } + + @Override + public void setAlgorithm(final Algorithm algorithm) { + Validate.notNull(algorithm, "Algorithm cannot be null!"); + validateNotClosed(); + this.algorithm = algorithm; + } + + @Override + public Algorithm getAlgorithm() { + return this.algorithm; + } + + @Override + public void setReasoningTimeout(Integer seconds) { + validateNotClosed(); + if (seconds != null) { + Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); + } + this.timeoutAfterSeconds = seconds; + } + + @Override + public Integer getReasoningTimeout() { + return this.timeoutAfterSeconds; + } + + @Override + public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { + validateNotClosed(); + Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); + this.ruleRewriteStrategy = ruleRewritingStrategy; + } + + @Override + public RuleRewriteStrategy getRuleRewriteStrategy() { + return this.ruleRewriteStrategy; + } + + /* + * TODO Due to automatic predicate renaming, it can happen that an EDB predicate + * cannot be queried after loading unless reasoning has already been invoked + * (since the auxiliary rule that imports the EDB facts to the "real" predicate + * must be used). This issue could be weakened by rewriting queries to + * (single-source) EDB predicates internally when in such a state, + */ + // @Override + void load() throws IOException { + validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + loadKnowledgeBase(); + break; + case KB_LOADED: + case MATERIALISED: + // do nothing, all KB is already loaded + break; + case KB_CHANGED: + resetReasoner(); + loadKnowledgeBase(); + default: + break; + } + } + + void loadKnowledgeBase() throws IOException { + LOGGER.info("Started loading knowledge base ..."); + final LoadKbVisitor visitor = new LoadKbVisitor(); + visitor.clearIndexes(); + for (final Statement statement : knowledgeBase) { + statement.accept(visitor); + } + + if (edbPredicates.isEmpty() && aliasedEdbPredicates.isEmpty()) { + LOGGER.warn("No facts have been provided."); + } + + try { + this.vLog.start(getDataSourcesConfigurationString(), false); + } catch (final AlreadyStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration.", e); + } + loadInMemoryDataSources(); + + validateDataSourcePredicateArities(); + + loadFacts(); + loadRules(); + + this.reasonerState = ReasonerState.KB_LOADED; + + // if there are no rules, then materialisation state is complete + this.correctness = rules.isEmpty() ? 
Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; + + LOGGER.info("Finished loading knowledge base."); + } + + String getDataSourcesConfigurationString() { + final StringBuilder configStringBuilder = new StringBuilder(); + final Formatter formatter = new Formatter(configStringBuilder); + int dataSourceIndex = 0; + for (final Predicate predicate : this.edbPredicates.keySet()) { + final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); + dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), predicate, + dataSourceIndex, formatter); + } + for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { + final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); + dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), aliasPredicate, + dataSourceIndex, formatter); + } + formatter.close(); + return configStringBuilder.toString(); + } + + int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, int dataSourceIndex, + Formatter formatter) { + if (dataSource != null) { + final String configString = dataSource.toConfigString(); + if (configString != null) { + formatter.format(dataSource.toConfigString(), dataSourceIndex, + ModelToVLogConverter.toVLogPredicate(predicate)); + return dataSourceIndex + 1; + } + } + return dataSourceIndex; + } + + /** + * Checks if the loaded external data sources do in fact contain data of the + * correct arity. + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ + void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { + for (final Predicate predicate : edbPredicates.keySet()) { + validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); + } + for (final DataSourceDeclaration dataSourceDeclaration : aliasesForEdbPredicates.keySet()) { + validateDataSourcePredicateArity(aliasesForEdbPredicates.get(dataSourceDeclaration), + dataSourceDeclaration.getDataSource()); + } + } + + void loadInMemoryDataSources() { + for (final Predicate predicate : this.edbPredicates.keySet()) { + final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); + loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), predicate); + } + for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { + final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); + loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), aliasPredicate); + } + } + + void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { + final InMemoryDataSource inMemoryDataSource; + if (dataSource instanceof InMemoryDataSource) { + inMemoryDataSource = (InMemoryDataSource) dataSource; + } else { + return; + } + try { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); + this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : inMemoryDataSource.getData()) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); + } + } + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration!", e); + } + } + + /** + * Checks if the loaded external data for a given source does in fact contain + * data of the correct arity 
for the given predidate. + * + * @param predicate the predicate for which data is loaded + * @param dataSource the data source used + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ + void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) + throws IncompatiblePredicateArityException { + if (dataSource == null) + return; + try { + final int dataSourcePredicateArity = this.vLog + .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); + if (dataSourcePredicateArity == -1) { + LOGGER.warn("Data source {} for predicate {} is empty! ", dataSource, predicate); + } else if (predicate.getArity() != dataSourcePredicateArity) { + throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } + } + + void loadFacts() { + for (final Predicate predicate : directEdbFacts.keySet()) { + Predicate aliasPredicate; + if (edbPredicates.containsKey(predicate)) { + aliasPredicate = predicate; + } else { + aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + } + try { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); + final String[][] vLogPredicateTuples = ModelToVLogConverter + .toVLogFactTuples(directEdbFacts.get(predicate)); + this.vLog.addData(vLogPredicateName, vLogPredicateTuples); + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : vLogPredicateTuples) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); + } + } + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration!", e); + } + } + } + + void loadRules() { + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); + final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter + .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); + try { + this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); + if (LOGGER.isDebugEnabled()) { + for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { + LOGGER.debug("Loaded rule {}.", rule.toString()); + } + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } + } + + @Override + public boolean reason() throws IOException { + validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + load(); + runChase(); + break; + case KB_LOADED: + runChase(); + break; + case KB_CHANGED: + resetReasoner(); + load(); + runChase(); + break; + case MATERIALISED: + runChase(); + break; + default: + break; + } + + return this.reasoningCompleted; + } + + private void runChase() { + LOGGER.info("Started materialisation of inferences ..."); + this.reasonerState = ReasonerState.MATERIALISED; + + final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; + try { + if (this.timeoutAfterSeconds == null) { + this.vLog.materialize(skolemChase); + this.reasoningCompleted = true; + } else { + this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final MaterializationException e) { + // FIXME: the message generated here is not guaranteed to be the correct + // interpretation of the exception that is caught + 
throw new RuntimeException( + "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", + e); + } + + if (this.reasoningCompleted) { + this.correctness = Correctness.SOUND_AND_COMPLETE; + LOGGER.info("Completed materialisation of inferences."); + } else { + this.correctness = Correctness.SOUND_BUT_INCOMPLETE; + LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); + } + } + + @Override + public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } + Validate.notNull(query, "Query atom must not be null!"); + + final boolean filterBlanks = !includeBlanks; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + + TermQueryResultIterator stringQueryResultIterator; + try { + stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. Answer must be empty!"); + return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); + } + + logWarningOnCorrectness(); + return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); + } + + @Override + public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, + final boolean includeBlanks) throws IOException { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } + Validate.notNull(query, "Query atom must not be null!"); + Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); + Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); + + final boolean filterBlanks = !includeBlanks; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + try { + this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } catch (final NonExistingPredicateException e1) { + throw new IllegalArgumentException(MessageFormat.format( + "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); + } + + logWarningOnCorrectness(); + return this.correctness; + } + + private void logWarningOnCorrectness() { + if (this.correctness != Correctness.SOUND_AND_COMPLETE) { + LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); + } + } + + @Override + public void resetReasoner() { + validateNotClosed(); + this.reasonerState = ReasonerState.KB_NOT_LOADED; + this.vLog.stop(); + LOGGER.info("Reasoner has been reset. 
All inferences computed during reasoning have been discarded."); + } + + @Override + public void close() { + if (this.reasonerState == ReasonerState.CLOSED) { + LOGGER.info("Reasoner is already closed."); + } else { + this.reasonerState = ReasonerState.CLOSED; + this.knowledgeBase.deleteListener(this); + this.vLog.stop(); + LOGGER.info("Reasoner closed."); + } + } + + @Override + public void setLogLevel(LogLevel logLevel) { + validateNotClosed(); + Validate.notNull(logLevel, "Log level cannot be null!"); + this.internalLogLevel = logLevel; + this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); + } + + @Override + public LogLevel getLogLevel() { + return this.internalLogLevel; + } + + @Override + public void setLogFile(String filePath) { + validateNotClosed(); + this.vLog.setLogFile(filePath); + } + + @Override + public boolean isJA() { + return checkAcyclicity(AcyclicityNotion.JA); + } + + @Override + public boolean isRJA() { + return checkAcyclicity(AcyclicityNotion.RJA); + } + + @Override + public boolean isMFA() { + return checkAcyclicity(AcyclicityNotion.MFA); + } + + @Override + public boolean isRMFA() { + return checkAcyclicity(AcyclicityNotion.RMFA); + } + + @Override + public boolean isMFC() { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, + "Checking rules acyclicity is not allowed before loading!"); + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic("MFC"); + } catch (final NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.CYCLIC); + } + + private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + try { + load(); + } catch (IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + throw new RuntimeException(e); + } + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); + } catch (final NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); + } + + @Override + public CyclicityResult checkForCycles() { + final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); + if (acyclic) { + return CyclicityResult.ACYCLIC; + } else { + final boolean cyclic = isMFC(); + if (cyclic) { + return CyclicityResult.CYCLIC; + } + return CyclicityResult.UNDETERMINED; + } + } + + @Override + public void onStatementsAdded(List statementsAdded) { + // TODO more elaborate materialisation state handling + + updateReasonerToKnowledgeBaseChanged(); + + // updateCorrectnessOnStatementsAdded(statementsAdded); + updateCorrectnessOnStatementsAdded(); + } + + @Override + public void onStatementAdded(Statement statementAdded) { + // TODO more elaborate materialisation state handling + + updateReasonerToKnowledgeBaseChanged(); + + // updateCorrectnessOnStatementAdded(statementAdded); + updateCorrectnessOnStatementsAdded(); + } + + @Override + public void onStatementRemoved(Statement statementRemoved) { + updateReasonerToKnowledgeBaseChanged(); + updateCorrectnessOnStatementsRemoved(); + } + + @Override + public void onStatementsRemoved(List statementsRemoved) { + updateReasonerToKnowledgeBaseChanged(); + updateCorrectnessOnStatementsRemoved(); + } + + private void 
updateReasonerToKnowledgeBaseChanged() { + if (this.reasonerState.equals(ReasonerState.KB_LOADED) + || this.reasonerState.equals(ReasonerState.MATERIALISED)) { + + this.reasonerState = ReasonerState.KB_CHANGED; + } + } + + private void updateCorrectnessOnStatementsAdded() { + if (this.reasonerState == ReasonerState.KB_CHANGED) { + // TODO refine + this.correctness = Correctness.INCORRECT; + } + } + + private void updateCorrectnessOnStatementsRemoved() { + if (this.reasonerState == ReasonerState.KB_CHANGED) { + // TODO refine + this.correctness = Correctness.INCORRECT; + } + } + + /** + * Check if reasoner is closed and throw an exception if it is. + * + * @throws ReasonerStateException + */ + void validateNotClosed() throws ReasonerStateException { + if (this.reasonerState == ReasonerState.CLOSED) { + LOGGER.error("Invalid operation requested on a closed reasoner object!"); + throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); + } + } + + ReasonerState getReasonerState() { + return this.reasonerState; + } + + void setReasonerState(ReasonerState reasonerState) { + this.reasonerState = reasonerState; + } + +} \ No newline at end of file From de7487584a0aaaa1a449e70dbfccdd144d6e5f32 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 16:16:52 +0100 Subject: [PATCH 0388/1003] changed names --- .../org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index bdf9ac788..8ba1fb964 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -413,7 +413,7 @@ public void testBlankPredicateName() throws ParsingException { } @Test - public void predicateNormalIriEqualityTest() throws ParsingException { + public void predicateRelativeNumericIRITest() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("a"); Fact f = RuleParser.parseFact("<1.e1>(a)."); Fact f2 = Expressions.makeFact("1.e1", a); @@ -421,7 +421,7 @@ public void predicateNormalIriEqualityTest() throws ParsingException { } @Test - public void predicateNormalIriEqualityTest2() throws ParsingException { + public void predicateAbsoluteIRITest() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("a"); Fact f = RuleParser.parseFact("(a)."); Fact f2 = Expressions.makeFact("a:b", a); From da60ed0ec3a52d3520d4e685be999b9bce1c09a1 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 16:31:19 +0100 Subject: [PATCH 0389/1003] added some styles --- .../vlog4j/core/model/implementation/Serializer.java | 11 +++++++---- .../core/reasoner/implementation/VLogReasoner.java | 2 +- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index ad3d345b2..122b9fc07 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -88,6 +88,10 @@ private static String checkRelativeAbsoluteIri(String string) { } } + private static String addQuote(String string) 
{ + return QUOTE + string + QUOTE; + } + /** * Creates a String representation of a given {@link Rule}. * @@ -240,8 +244,7 @@ public static String getString(Conjunction conjunction) { * {@link LanguageStringConstant}. */ public static String getConstantName(LanguageStringConstant languageStringConstant) { - return QUOTE + escape(languageStringConstant.getString()) + QUOTE + AT - + languageStringConstant.getLanguageTag(); + return addQuote(escape(languageStringConstant.getString())) + AT + languageStringConstant.getLanguageTag(); } /** @@ -255,7 +258,7 @@ public static String getConstantName(LanguageStringConstant languageStringConsta */ public static String getString(DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return QUOTE + datatypeConstant.getLexicalValue() + QUOTE; + return addQuote(datatypeConstant.getLexicalValue()); } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) @@ -278,7 +281,7 @@ public static String getString(DatatypeConstant datatypeConstant) { * {@link DatatypeConstant}. */ public static String getConstantName(DatatypeConstant datatypeConstant) { - return QUOTE + escape(datatypeConstant.getLexicalValue()) + QUOTE + CARET + CARET + LESS_THAN + return addQuote(escape(datatypeConstant.getLexicalValue())) + CARET + CARET + LESS_THAN + datatypeConstant.getDatatype() + MORE_THAN; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 5376e0896..91ed463f0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -809,4 +809,4 @@ void setReasonerState(ReasonerState reasonerState) { this.reasonerState = reasonerState; } -} \ No newline at end of file +} From 2dd34cc6df70903acd1091d9583cb84d90649812 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 21:38:11 +0100 Subject: [PATCH 0390/1003] fixed --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 1 - 1 file changed, 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 91ed463f0..260db93e2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -808,5 +808,4 @@ ReasonerState getReasonerState() { void setReasonerState(ReasonerState reasonerState) { this.reasonerState = reasonerState; } - } From 967430814ab7823bdb7899c7225b7e49d59e4757 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 17:05:19 +0100 Subject: [PATCH 0391/1003] Bump maven-surefire-plugin version --- pom.xml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 551a99f16..52d593caa 100644 --- a/pom.xml +++ b/pom.xml @@ -137,7 +137,7 @@
    - org.eclipse.m2e lifecycle-mapping @@ -208,7 +208,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -218,6 +218,11 @@ 1.8 + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M4 + org.eluder.coveralls From cd21005e6db00bcb3cd7b526edd4b63df83ca792 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 17:38:23 +0100 Subject: [PATCH 0392/1003] Prefer jacoco over cobertura Fixes #85. --- coverage/LICENSE.txt | 201 +++++++++++++++++++++++++++++++++++++++++++ coverage/pom.xml | 76 ++++++++++++++++ pom.xml | 65 +++++++------- 3 files changed, 311 insertions(+), 31 deletions(-) create mode 100644 coverage/LICENSE.txt create mode 100644 coverage/pom.xml diff --git a/coverage/LICENSE.txt b/coverage/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/coverage/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/coverage/pom.xml b/coverage/pom.xml new file mode 100644 index 000000000..17c0c49b1 --- /dev/null +++ b/coverage/pom.xml @@ -0,0 +1,76 @@ + + + 4.0.0 + + + org.semanticweb.vlog4j + vlog4j-parent + 0.5.0-SNAPSHOT + + + coverage + + coverage + + + org.semanticweb.vlog4j + vlog4j-core + 0.5.0-SNAPSHOT + + + org.semanticweb.vlog4j + vlog4j-rdf + 0.5.0-SNAPSHOT + + + org.semanticweb.vlog4j + vlog4j-examples + 0.5.0-SNAPSHOT + + + org.semanticweb.vlog4j + vlog4j-owlapi + 0.5.0-SNAPSHOT + + + org.semanticweb.vlog4j + vlog4j-graal + 0.5.0-SNAPSHOT + + + org.semanticweb.vlog4j + vlog4j-parser + 0.5.0-SNAPSHOT + + + + + + + org.jacoco + jacoco-maven-plugin + + + aggregate-reports-ut + test + + report-aggregate + + + + + **/javacc/JavaCC*.class + **/javacc/JavaCharStream.class + **/javacc/ParseException.class + **/javacc/SimpleCharStream.class + **/javacc/Token.class + **/javacc/TokenMgrError.class + + + + + + + + diff --git a/pom.xml b/pom.xml index 52d593caa..0d33b88b2 100644 --- a/pom.xml +++ b/pom.xml @@ -15,6 +15,9 @@ https://github.com/knowsys/vlog4j + vlog4j-core vlog4j-rdf vlog4j-examples @@ -22,6 +25,7 @@ vlog4j-graal vlog4j-parser vlog4j-client + coverage @@ -200,6 +204,11 @@ + + org.jacoco + jacoco-maven-plugin + 0.8.5 + @@ -218,11 +227,16 @@ 1.8 - - org.apache.maven.plugins - maven-surefire-plugin - 3.0.0-M4 - + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M4 + + ${surefireArgLine} + 1C + true + + org.eluder.coveralls @@ -230,34 +244,23 @@ 4.3.0 - - - org.codehaus.mojo - cobertura-maven-plugin - 2.7 - - xml - - 256m - - true - - - - **/javacc/JavaCC*.class - **/javacc/JavaCharStream.class - **/javacc/ParseException.class - **/javacc/SimpleCharStream.class - **/javacc/Token.class - **/javacc/TokenMgrError.class - - - + + org.jacoco + jacoco-maven-plugin + + + prepare-agent + + prepare-agent + + + surefireArgLine + + + - org.apache.maven.plugins maven-javadoc-plugin From 23218990877378b3becf30275d8312295d5c8d4d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 17:51:36 +0100 Subject: [PATCH 0393/1003] Update travis hooks to use jacoco --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 24e616797..98555fd73 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,7 +13,7 @@ jdk: install: mvn install $OPTIONS -DskipTests=true 
after_success: - - mvn clean cobertura:cobertura coveralls:report + - mvn clean test jacoco:report coveralls:report dist: trusty sudo: false From bb16b979cf6be86753a59822b113d302d401a48b Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 18:37:10 +0100 Subject: [PATCH 0394/1003] Update exclusion list for jacoco --- coverage/pom.xml | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 17c0c49b1..fcd928ba5 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -60,12 +60,14 @@ - **/javacc/JavaCC*.class - **/javacc/JavaCharStream.class - **/javacc/ParseException.class - **/javacc/SimpleCharStream.class - **/javacc/Token.class - **/javacc/TokenMgrError.class + **/javacc/JavaCCParser.java + **/javacc/JavaCCParserConstants.java + **/javacc/JavaCCParserTokenManager.java + **/javacc/JavaCharStream.java + **/javacc/ParseException.java + **/javacc/SimpleCharStream.java + **/javacc/Token.java + **/javacc/TokenMgrError.java From 046079e3c32828e0118b68c12d4f7dbe886965fd Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 20:09:49 +0100 Subject: [PATCH 0395/1003] Fix exclusion of generated source files --- coverage/pom.xml | 18 +++++----------- pom.xml | 54 ++++++++++++++++++++++++++++++++++++++++++------ 2 files changed, 53 insertions(+), 19 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index fcd928ba5..c3f0ccadb 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -47,6 +47,11 @@ + + + org.eluder.coveralls + coveralls-maven-plugin + org.jacoco jacoco-maven-plugin @@ -57,19 +62,6 @@ report-aggregate - - - - **/javacc/JavaCCParser.java - **/javacc/JavaCCParserConstants.java - **/javacc/JavaCCParserTokenManager.java - **/javacc/JavaCharStream.java - **/javacc/ParseException.java - **/javacc/SimpleCharStream.java - **/javacc/Token.java - **/javacc/TokenMgrError.java - - diff --git a/pom.xml b/pom.xml index 0d33b88b2..bf1cdca27 100644 --- a/pom.xml +++ b/pom.xml @@ -238,10 +238,25 @@ - - org.eluder.coveralls - coveralls-maven-plugin - 4.3.0 + + org.eluder.coveralls + coveralls-maven-plugin + 4.3.0 + + + coverage/target/site/jacoco-aggregate/jacoco.xml + + + + + + javax.xml.bind + jaxb-api + 2.3.1 + + @@ -257,11 +272,38 @@ surefireArgLine + + default-cli + + report + + test + + + ${project.reporting.outputDirectory}/jacoco-ut + + + + + + **/javacc/JavaCCParser* + **/javacc/JavaCCParserConstants* + **/javacc/JavaCCParserTokenManager* + **/javacc/JavaCharStream* + **/javacc/ParseException* + **/javacc/SimpleCharStream* + **/javacc/Token* + **/javacc/TokenMgrError* + + - + org.apache.maven.plugins maven-javadoc-plugin ${maven.javadoc.version} From 0140a95c2ad70e7bf5276ade6a4bdcdc34ad668a Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 20:15:42 +0100 Subject: [PATCH 0396/1003] Coverage: add vlog4j-client --- coverage/pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/coverage/pom.xml b/coverage/pom.xml index c3f0ccadb..4cbd79d9b 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -43,6 +43,11 @@ vlog4j-parser 0.5.0-SNAPSHOT + + org.semanticweb.vlog4j + vlog4j-client + 0.5.0-SNAPSHOT + From 1823d24f2990d3e8abd40b8e4e8bb39be0388fa4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 21:59:50 +0100 Subject: [PATCH 0397/1003] Coverage: exclude vlog4j-examples --- coverage/pom.xml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 4cbd79d9b..e158f1d91 100644 --- a/coverage/pom.xml +++ 
b/coverage/pom.xml @@ -23,11 +23,6 @@ vlog4j-rdf 0.5.0-SNAPSHOT - - org.semanticweb.vlog4j - vlog4j-examples - 0.5.0-SNAPSHOT - org.semanticweb.vlog4j vlog4j-owlapi From 72e81023f2bc4b545caa5d4c159052e85c02a6c2 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 14:33:07 +0100 Subject: [PATCH 0398/1003] Rename client package to org.semanticweb.vlog4j.client --- vlog4j-client/pom.xml | 2 +- .../vlog4j/client/picocli/ClientUtils.java | 10 +++++----- .../vlog4j/client/picocli/PrintQueryResults.java | 14 +++++++------- .../vlog4j/client/picocli/SaveModel.java | 12 ++++++------ .../vlog4j/client/picocli/SaveQueryResults.java | 12 ++++++------ .../vlog4j/client/picocli/VLog4jClient.java | 8 ++++---- .../client/picocli/VLog4jClientMaterialize.java | 8 ++++---- .../client/picocli/PrintQueryResultsTest.java | 8 ++++---- .../vlog4j/client/picocli/SaveModelTest.java | 8 ++++---- .../client/picocli/SaveQueryResultsTest.java | 6 +++--- 10 files changed, 44 insertions(+), 44 deletions(-) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/ClientUtils.java (98%) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/PrintQueryResults.java (97%) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/SaveModel.java (98%) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/SaveQueryResults.java (98%) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/VLog4jClient.java (95%) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/VLog4jClientMaterialize.java (99%) rename vlog4j-client/src/test/java/org/{ => semanticweb}/vlog4j/client/picocli/PrintQueryResultsTest.java (98%) rename vlog4j-client/src/test/java/org/{ => semanticweb}/vlog4j/client/picocli/SaveModelTest.java (99%) rename vlog4j-client/src/test/java/org/{ => semanticweb}/vlog4j/client/picocli/SaveQueryResultsTest.java (98%) diff --git a/vlog4j-client/pom.xml b/vlog4j-client/pom.xml index fbee05c25..f6f9d5169 100644 --- a/vlog4j-client/pom.xml +++ b/vlog4j-client/pom.xml @@ -67,7 +67,7 @@ - org.vlog4j.client.picocli.VLog4jClient + org.semanticweb.vlog4j.client.picocli.VLog4jClient diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java similarity index 98% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java index 5893f86c2..35b10b2b0 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -32,7 +32,7 @@ /** * Utility class for interacting with the vlog4j client. 
- * + * * @author dragoste * */ @@ -53,7 +53,7 @@ private ClientUtils() { * restrict the logging messages that are shown on the console or to change * their formatting. See the documentation of Log4J for details on how to do * this. - * + * * Note: The VLog C++ backend performs its own logging. The log-level for this * can be configured using * {@link Reasoner#setLogLevel(org.semanticweb.vlog4j.core.reasoner.LogLevel)}. diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java similarity index 97% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java index f80c226f2..b9d656f3d 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,7 @@ /** * Helper class to print query results. - * + * * @author Larry Gonzalez * */ @@ -35,7 +35,7 @@ public class PrintQueryResults { /** * If true, Vlog4jClient will print the size of the query result. Mutually * exclusive with {@code --print-complete-query-result} - * + * * @default true */ @Option(names = "--print-query-result-size", description = "Boolean. If true, Vlog4jClient will print the size of the query result. True by default.") @@ -44,7 +44,7 @@ public class PrintQueryResults { /** * If true, Vlog4jClient will print the query result in stdout. Mutually * exclusive with {@code --print-query-result-size} - * + * * @default false */ @Option(names = "--print-complete-query-result", description = "Boolean. If true, Vlog4jClient will print the query result in stdout. False by default.") @@ -61,7 +61,7 @@ public PrintQueryResults(final boolean sizeOnly, final boolean complete) { /** * Check correct configuration of the class. @code{--print-query-result-size} * and @code{--print-query-result} are mutually exclusive. - * + * * @return @code{true} if configuration is valid. */ public boolean isValid() { diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java similarity index 98% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java index fb03cf117..12be0e9d0 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,7 @@ /** * Helper class to save the resulting model of the materialization process. - * + * * @author Larry Gonzalez * */ @@ -65,7 +65,7 @@ public SaveModel(final boolean saveModel, final String outputDir) { /** * Check correct configuration of the class. If @code{--save-model} is true, * then a non-empty @code{--output-model-directory} is required. - * + * * @return @code{true} if configuration is valid. */ public boolean isConfigurationValid() { @@ -74,7 +74,7 @@ public boolean isConfigurationValid() { /** * Check that the path to store the model is either non-existing or a directory. - * + * * @return @code{true} if conditions are satisfied. */ public boolean isDirectoryValid() { diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java similarity index 98% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java index 441aa359f..ce0345bd0 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,7 @@ /** * Helper class to save query results. - * + * * @author Larry Gonzalez * */ @@ -66,7 +66,7 @@ public SaveQueryResults(final boolean saveResults, final String outputDir) { /** * Check correct configuration of the class. If @code{--save-query-results} is * true, then a non-empty @code{--output-query-result-directory} is required. - * + * * @return @code{true} if configuration is valid. */ public boolean isConfigurationValid() { @@ -77,7 +77,7 @@ public boolean isConfigurationValid() { /** * Check that the path to store the query results is either non-existing or a * directory. - * + * * @return @code{true} if conditions are satisfied. 
*/ public boolean isDirectoryValid() { diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java similarity index 95% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java index f35f01fb6..a0535ec79 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,7 @@ /** * Stand alone client for VLog4j. - * + * * @author Larry Gonzalez * */ diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java similarity index 99% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java index 2cb915dac..af76e931a 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -40,7 +40,7 @@ /** * Class to implement a command to execute full materialization. - * + * * @author Larry Gonzalez * */ diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java similarity index 98% rename from vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java rename to vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java index b949a5ff3..167b66fb4 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -36,7 +36,7 @@ public class PrintQueryResultsTest { sizeTrueCompleteTrue.setComplete(true); sizeFalseCompleteFalse.setSizeOnly(false); } - + @Test public void isValid_sizeTrueCompleteFalse_valid() { // default configuration diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java similarity index 99% rename from vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java rename to vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java index 5be484df7..17074eb37 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java +++ b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -20,9 +20,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -42,7 +42,7 @@ public class SaveModelTest { private final static SaveModel saveFalseDefaultDir = new SaveModel(); private final static SaveModel saveFalseEmptyDir = new SaveModel(false, ""); private final static SaveModel saveFalseNullDir = new SaveModel(false, null); - + static { saveTrueDefaultDir.setSaveModel(true); saveFalseDefaultDir.setSaveModel(false); diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java similarity index 98% rename from vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java rename to vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java index b7f684f61..124511f5d 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -19,9 +19,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
From cf6811318dd776f0f560072e62ba721b620fc847 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 4 Oct 2019 17:25:35 +0200 Subject: [PATCH 0399/1003] Add editorconfig --- .editorconfig | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .editorconfig diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..1b5ced5b4 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,13 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_style = tab +indent_size = 4 +insert_final_newline = true +trim_trailing_whitespace = true + +[*.md] +trim_trailing_whitespace = false +insert_final_newline = false From 386e803897db9a204fef982c10152cde9a0e5329 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 4 Oct 2019 00:26:19 +0200 Subject: [PATCH 0400/1003] Ignore TAGS file --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index b760aec6a..15e338099 100644 --- a/.gitignore +++ b/.gitignore @@ -54,3 +54,4 @@ vlog4j-examples/src/main/data/output/* vlog4j-examples/src/main/data/logs/* vlog4j-rdf/src/main/data/output/* /build-vlog/vlog/ +/TAGS From f8fa375ba0bf06cff3dc2a6f7fd46a1a29524aa6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 14:36:41 +0100 Subject: [PATCH 0401/1003] Remove final on picocli @Options --- .../vlog4j/client/picocli/VLog4jClientMaterialize.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java index af76e931a..fd566f667 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -59,19 +59,19 @@ public class VLog4jClientMaterialize implements Runnable { // private List graalRuleFiles = new ArrayList<>(); @Option(names = "--log-level", description = "Log level of VLog (c++ library). One of: DEBUG, INFO, WARNING (default), ERROR.", required = false) - private final LogLevel logLevel = LogLevel.WARNING; + private LogLevel logLevel = LogLevel.WARNING; @Option(names = "--log-file", description = "Log file of VLog (c++ library). VLog will log to the default system output by default", required = false) private String logFile; @Option(names = "--chase-algorithm", description = "Chase algorithm. RESTRICTED_CHASE (default) or SKOLEM_CHASE.", required = false) - private final Algorithm chaseAlgorithm = Algorithm.RESTRICTED_CHASE; + private Algorithm chaseAlgorithm = Algorithm.RESTRICTED_CHASE; @Option(names = "--timeout", description = "Timeout in seconds. Infinite by default", required = false) - private final int timeout = 0; + private int timeout = 0; @Option(names = "--query", description = "Positive not-ground Literals to query after materialization in rls syntax. 
Vlog4jClient will print the size of its extension", required = true) - private final List queryStrings = new ArrayList<>(); + private List queryStrings = new ArrayList<>(); @ArgGroup(exclusive = false) private final PrintQueryResults printQueryResults = new PrintQueryResults(); From b917bb6877d4d7e62d0647c423f699fc1674ce78 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 14:58:21 +0100 Subject: [PATCH 0402/1003] Update travis configuration --- .editorconfig | 3 +++ .travis.yml | 32 +++++++++++++++++++++++--------- 2 files changed, 26 insertions(+), 9 deletions(-) diff --git a/.editorconfig b/.editorconfig index 1b5ced5b4..b8d1c9f09 100644 --- a/.editorconfig +++ b/.editorconfig @@ -11,3 +11,6 @@ trim_trailing_whitespace = true [*.md] trim_trailing_whitespace = false insert_final_newline = false + +[*.yml] +indent_style = space diff --git a/.travis.yml b/.travis.yml index 98555fd73..49a0fac52 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,21 +1,35 @@ language: java -jdk: - - openjdk8 -# - oraclejdk8 -# - oraclejdk9 +matrix: + include: + - os: linux + dist: bionic + jdk: + - openjdk11 + + - os: linux + dist: trusty + addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - g++-7 + jdk: + - openjdk8 + + - os: osx + ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -#before_install: - # - sudo apt-get install gcc-5 -y - # - eval “CC=gcc-5 && CXX=g++-5” - # - sh ./build-vlog-library.sh +before_install: + - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true after_success: - mvn clean test jacoco:report coveralls:report -dist: trusty +dist: bionic sudo: false cache: From 7f5c2170b2142b25cb43018317be02e6c8a0adfd Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 15:45:32 +0100 Subject: [PATCH 0403/1003] Move modules into a profile to avoid cyclic dependencies `install-vlog-library.sh` calls `mvn initialize -Pdevelopment`, so we need to avoid having a module with a dependency on `vlog4j-core` in the reactor, since it has not been built yet. --- pom.xml | 45 +++-- vlog4j-core/LICENSE.txt | 402 ++++++++++++++++++++-------------------- 2 files changed, 232 insertions(+), 215 deletions(-) diff --git a/pom.xml b/pom.xml index bf1cdca27..05878fc60 100644 --- a/pom.xml +++ b/pom.xml @@ -14,20 +14,6 @@ A Java library for working with the VLog rule engine https://github.com/knowsys/vlog4j - - - vlog4j-core - vlog4j-rdf - vlog4j-examples - vlog4j-owlapi - vlog4j-graal - vlog4j-parser - vlog4j-client - coverage - - Apache License, Version 2.0 @@ -329,6 +315,37 @@ + + + client + + true + + + + vlog4j-core + vlog4j-rdf + vlog4j-examples + vlog4j-owlapi + vlog4j-graal + vlog4j-parser + vlog4j-client + coverage + + + + + development + + vlog4j-core + + sign diff --git a/vlog4j-core/LICENSE.txt b/vlog4j-core/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/vlog4j-core/LICENSE.txt +++ b/vlog4j-core/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
+ Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. From 5ac1885fff58197310bf6a572656157892e7c4f6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 16:07:23 +0100 Subject: [PATCH 0404/1003] Force gcc-7 for builds on trusty --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 49a0fac52..b9435d927 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,6 +16,7 @@ matrix: - g++-7 jdk: - openjdk8 + env: CC=gcc-7 CXX=g++-7 - os: osx From 8baee23e8626fa50a597b545cb62828e8c3da2f3 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 16:25:52 +0100 Subject: [PATCH 0405/1003] Disable jacoco in development profile so we don't toggle modules --- pom.xml | 71 +++++++++++++++++++++++++++++---------------------------- 1 file changed, 36 insertions(+), 35 deletions(-) diff --git a/pom.xml b/pom.xml index 05878fc60..aadb0e2d2 100644 --- a/pom.xml +++ b/pom.xml @@ -14,6 +14,20 @@ A Java library for working with the VLog rule engine https://github.com/knowsys/vlog4j + + + vlog4j-core + vlog4j-rdf + vlog4j-examples + vlog4j-owlapi + vlog4j-graal + vlog4j-parser + vlog4j-client + + + Apache License, Version 2.0 @@ -266,9 +280,9 @@ test + coveralls plugin will not try to aggregate + this into the final coverage report, since we + want to control aggregation ourselves. --> ${project.reporting.outputDirectory}/jacoco-ut @@ -289,7 +303,7 @@ + docs for upload to github: javadoc:aggregate --> org.apache.maven.plugins maven-javadoc-plugin ${maven.javadoc.version} @@ -315,37 +329,6 @@
    - - - client - - true - - - - vlog4j-core - vlog4j-rdf - vlog4j-examples - vlog4j-owlapi - vlog4j-graal - vlog4j-parser - vlog4j-client - coverage - - - - - development - - vlog4j-core - - sign @@ -405,6 +388,24 @@ + + development + + + + + org.jacoco + jacoco-maven-plugin + + + prepare-agent + none + + + + + + From bb98795f17d272f4a35c77f06603201b8d993344 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 16:27:45 +0100 Subject: [PATCH 0406/1003] Go back to released vlog4j-base --- .travis.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index b9435d927..bacd83438 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,6 +14,7 @@ matrix: - ubuntu-toolchain-r-test packages: - g++-7 + - libstdc++6 jdk: - openjdk8 env: CC=gcc-7 CXX=g++-7 @@ -22,8 +23,8 @@ matrix: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -before_install: - - sh ./build-vlog-library.sh +# before_install: +# - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 9fa84f92e0ea8e1e2a77ca9ab9327b48752fab62 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 16:50:35 +0100 Subject: [PATCH 0407/1003] Also build on xenial --- .travis.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index bacd83438..d6ce3d867 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,11 +13,14 @@ matrix: sources: - ubuntu-toolchain-r-test packages: - - g++-7 - - libstdc++6 + - gcc-5 + - g++-5 jdk: - openjdk8 - env: CC=gcc-7 CXX=g++-7 + env: CC=gcc-5 CXX=g++-5 + + - os: linux + dist: xenial - os: osx From 3f38a2c462e21f01b603e415c7fc492988f68470 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 17:48:01 +0100 Subject: [PATCH 0408/1003] Use gcc-6 on trusty --- .travis.yml | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index d6ce3d867..1832e1701 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,14 +13,24 @@ matrix: sources: - ubuntu-toolchain-r-test packages: - - gcc-5 - - g++-5 + - gcc-6 + - g++-6 + - libstdc++-6 jdk: - openjdk8 - env: CC=gcc-5 CXX=g++-5 + env: CC=gcc-6 CXX=g++-6 - os: linux dist: xenial + addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - gcc-7 + - g++-7 + - libstdc++-7 + env: CC=gcc-7 CXX=g++-7 - os: osx From 36fa43c0b736cc176f05d03e8b1085e7d95a97b7 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 17:53:10 +0100 Subject: [PATCH 0409/1003] Fix package name for libstdc++ --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1832e1701..dcfeee2a7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,7 @@ matrix: packages: - gcc-6 - g++-6 - - libstdc++-6 + - libstdc++6 jdk: - openjdk8 env: CC=gcc-6 CXX=g++-6 @@ -29,7 +29,7 @@ matrix: packages: - gcc-7 - g++-7 - - libstdc++-7 + - libstdc++6 env: CC=gcc-7 CXX=g++-7 - os: osx From 2d4c1ca3ee048511c0785d457ba329a4c2736a13 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 18:16:57 +0100 Subject: [PATCH 0410/1003] Use gcc-7 on trusty --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index dcfeee2a7..e82d92701 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,12 +13,12 @@ matrix: sources: - ubuntu-toolchain-r-test packages: - - gcc-6 - - g++-6 + - gcc-7 + - g++-7 - libstdc++6 jdk: - openjdk8 - 
env: CC=gcc-6 CXX=g++-6 + env: CC=gcc-7 CXX=g++-7 - os: linux dist: xenial From e4bac2b5284cc0faa14c3e63268e031805bafa14 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 18:26:59 +0100 Subject: [PATCH 0411/1003] Don't forget to include coverage module --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index aadb0e2d2..98b1b8ce1 100644 --- a/pom.xml +++ b/pom.xml @@ -25,7 +25,7 @@ vlog4j-graal vlog4j-parser vlog4j-client - + coverage From a123f9ca59e58f6a4abdefcbb7abd8347ca477ca Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 18:28:09 +0100 Subject: [PATCH 0412/1003] Allow trusty to fail --- .travis.yml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index e82d92701..ab63b97bf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,12 +13,12 @@ matrix: sources: - ubuntu-toolchain-r-test packages: - - gcc-7 - - g++-7 + - gcc-6 + - g++-6 - libstdc++6 jdk: - openjdk8 - env: CC=gcc-7 CXX=g++-7 + env: CC=gcc-6 CXX=g++-6 - os: linux dist: xenial @@ -35,6 +35,11 @@ matrix: - os: osx +jobs: + allow_failures: + - dist: trusty + + ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar # before_install: # - sh ./build-vlog-library.sh From 74b4783c8e6714c58421edf481c711928ac4cbc9 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 3 Dec 2019 13:10:42 +0100 Subject: [PATCH 0413/1003] Try gcc-6 on xenial --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index ab63b97bf..1b1b9a53f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,10 +27,10 @@ matrix: sources: - ubuntu-toolchain-r-test packages: - - gcc-7 - - g++-7 + - gcc-6 + - g++-6 - libstdc++6 - env: CC=gcc-7 CXX=g++-7 + env: CC=gcc-6 CXX=g++-6 - os: osx From af7b5414c7537c55ec6c24f5c542e08d37125e9c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 3 Dec 2019 16:44:21 +0100 Subject: [PATCH 0414/1003] fixes bug #139 + some refactoring --- .../core/model/implementation/Serializer.java | 252 ++++++++++++------ .../implementation/CsvFileDataSource.java | 3 +- .../SparqlQueryResultDataSource.java | 32 ++- .../core/model/DataSourceDeclarationTest.java | 81 ++++-- 4 files changed, 241 insertions(+), 127 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 122b9fc07..0f9778264 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,5 +1,6 @@ package org.semanticweb.vlog4j.core.model.implementation; +import org.apache.commons.lang3.StringUtils; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; /*- @@ -36,6 +37,10 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; /** * A utility class with static methods to obtain the correct 
parsable string @@ -55,18 +60,22 @@ public final class Serializer { public static final String CLOSING_PARENTHESIS = ")"; public static final String RULE_SEPARATOR = " :- "; public static final String AT = "@"; - public static final String SOURCE = "@source "; + public static final String DATA_SOURCE = "@source "; + public static final String CSV_FILE_DATA_SOURCE = "load-csv"; + private static final String RDF_FILE_DATA_SOURCE = "load-rdf"; + private static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; public static final String COLON = ": "; public static final String COLON_UNSPACED = ":"; public static final String CARET = "^"; public static final String LESS_THAN = "<"; public static final String MORE_THAN = ">"; public static final String QUOTE = "\""; - public static final String DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; - public static final String INTEGER = "^[-+]?\\d+$"; - public static final String DECIMAL = "^(\\d*\\.)?\\d+$"; - public static final String TRUE = "true"; - public static final String FALSE = "false"; + + public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; + public static final String REGEX_INTEGER = "^[-+]?\\d+$"; + public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; + public static final String REGEX_TRUE = "true"; + public static final String REGEX_FALSE = "false"; /** * Constructor. @@ -75,28 +84,12 @@ private Serializer() { } - private static String escape(String string) { - return string.replace("\\", "\\\\").replace("\"", "\\\""); - } - - private static String checkRelativeAbsoluteIri(String string) { - if ((string.contains(COLON_UNSPACED) || string.matches(INTEGER) || string.matches(DOUBLE) - || string.matches(DECIMAL) || string.equals(TRUE) || string.equals(FALSE))) { - return LESS_THAN + string + MORE_THAN; - } else { - return string; - } - } - - private static String addQuote(String string) { - return QUOTE + string + QUOTE; - } - /** * Creates a String representation of a given {@link Rule}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param rule a {@link Rule}. + * @param rule + * a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * */ @@ -104,11 +97,34 @@ public static String getString(Rule rule) { return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + DOT; } + /** + * Creates a String representation of a given {@link Conjunction}. + * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @param conjunction + * a {@link Conjunction} + * @return String representation corresponding to a given {@link Conjunction}. + */ + public static String getString(Conjunction conjunction) { + final StringBuilder stringBuilder = new StringBuilder(); + boolean first = true; + for (final Literal literal : conjunction.getLiterals()) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + stringBuilder.append(getString(literal)); + } + return stringBuilder.toString(); + } + /** * Creates a String representation of a given {@link Literal}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param literal a {@link Literal} + * @param literal + * a {@link Literal} * @return String representation corresponding to a given {@link Literal}. 
*/ public static String getString(Literal literal) { @@ -116,7 +132,7 @@ public static String getString(Literal literal) { if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); } - stringBuilder.append(checkRelativeAbsoluteIri(literal.getPredicate().getName())).append(OPEN_PARENTHESIS); + stringBuilder.append(getIRIString(literal.getPredicate().getName())).append(OPEN_PARENTHESIS); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { @@ -124,7 +140,7 @@ public static String getString(Literal literal) { } else { stringBuilder.append(COMMA); } - String string = term.getSyntacticRepresentation(); + final String string = term.getSyntacticRepresentation(); stringBuilder.append(string); } stringBuilder.append(CLOSING_PARENTHESIS); @@ -135,7 +151,8 @@ public static String getString(Literal literal) { * Creates a String representation of a given {@link Fact}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param fact a {@link Fact} + * @param fact + * a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ public static String getFactString(Fact fact) { @@ -146,18 +163,73 @@ public static String getFactString(Fact fact) { * Creates a String representation of a given {@link Constant}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param constant a {@link Constant} + * @param constant + * a {@link Constant} * @return String representation corresponding to a given {@link Constant}. */ public static String getString(AbstractConstant constant) { - return checkRelativeAbsoluteIri(constant.getName()); + return getIRIString(constant.getName()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @param languageStringConstant + * a {@link LanguageStringConstant} + * @return String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + */ + public static String getConstantName(LanguageStringConstant languageStringConstant) { + return addQuotes(escape(languageStringConstant.getString())) + AT + languageStringConstant.getLanguageTag(); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} without an IRI. + * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @param datatypeConstant + * a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(DatatypeConstant datatypeConstant) { + if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { + return addQuotes(datatypeConstant.getLexicalValue()); + } else { + if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { + return datatypeConstant.getLexicalValue(); + } else { + return getConstantName(datatypeConstant); + } + } + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. 
+ * @param datatypeConstant + * a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getConstantName(DatatypeConstant datatypeConstant) { + return addQuotes(escape(datatypeConstant.getLexicalValue())) + CARET + CARET + + addAngleBrackets(datatypeConstant.getDatatype()); } /** * Creates a String representation of a given {@link ExistentialVariable}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param existentialVariable a {@link ExistentialVariable} + * @param existentialVariable + * a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. */ @@ -169,7 +241,8 @@ public static String getString(ExistentialVariable existentialVariable) { * Creates a String representation of a given {@link UniversalVariable}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param universalVariable a {@link UniversalVariable} + * @param universalVariable + * a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. */ @@ -181,7 +254,8 @@ public static String getString(UniversalVariable universalVariable) { * Creates a String representation of a given {@link NamedNull}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param namedNull a {@link NamedNull} + * @param namedNull + * a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ public static String getString(NamedNull namedNull) { @@ -192,7 +266,8 @@ public static String getString(NamedNull namedNull) { * Creates a String representation of a given {@link Predicate}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param predicate a {@link Predicate} + * @param predicate + * a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(Predicate predicate) { @@ -203,86 +278,87 @@ public static String getString(Predicate predicate) { * Creates a String representation of a given {@link DataSourceDeclaration}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @param dataSourceDeclaration + * a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return SOURCE + dataSourceDeclaration.getPredicate().getName() + OPEN_PARENTHESIS + return DATA_SOURCE + dataSourceDeclaration.getPredicate().getName() + OPEN_PARENTHESIS + dataSourceDeclaration.getPredicate().getArity() + CLOSING_PARENTHESIS + COLON + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } /** - * Creates a String representation of a given {@link Conjunction}. + * Creates a String representation of a given {@link CsvFileDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param conjunction a {@link Conjunction} - * @return String representation corresponding to a given {@link Conjunction}. + * @see <"https://github.com/knowsys/vlog4j/wiki">. + * + * @param csvFileDataSource + * @return String representation corresponding to a given + * {@link CsvFileDataSource}. 
*/ - public static String getString(Conjunction conjunction) { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final Literal literal : conjunction.getLiterals()) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - stringBuilder.append(getString(literal)); - } - return stringBuilder.toString(); + public static String getString(CsvFileDataSource csvFileDataSource) { + return CSV_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS + + StringUtils.SPACE + DOT; } /** - * Creates a String representation corresponding to the name of a given - * {@link LanguageStringConstant}. + * Creates a String representation of a given {@link RdfFileDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param languageStringConstant a {@link LanguageStringConstant} - * @return String representation corresponding to the name of a given - * {@link LanguageStringConstant}. + * @see <"https://github.com/knowsys/vlog4j/wiki">. + * + * + * @param rdfFileDataSource + * @return String representation corresponding to a given + * {@link RdfFileDataSource}. */ - public static String getConstantName(LanguageStringConstant languageStringConstant) { - return addQuote(escape(languageStringConstant.getString())) + AT + languageStringConstant.getLanguageTag(); + public static String getString(RdfFileDataSource rdfFileDataSource) { + return RDF_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS + + StringUtils.SPACE + DOT; } /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} without an IRI. + * Creates a String representation of a given + * {@link SparqlQueryResultDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param datatypeConstant a {@link DatatypeConstant} + * @see <"https://github.com/knowsys/vlog4j/wiki">. + * + * + * @param dataSource * @return String representation corresponding to a given - * {@link DatatypeConstant}. + * {@link SparqlQueryResultDataSource}. 
*/ - public static String getString(DatatypeConstant datatypeConstant) { - if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return addQuote(datatypeConstant.getLexicalValue()); - } else { - if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { - return datatypeConstant.getLexicalValue(); - } else { - return getConstantName(datatypeConstant); - } + public static String getString(SparqlQueryResultDataSource dataSource) { + return SPARQL_QUERY_RESULT_DATA_SOURCE + OPEN_PARENTHESIS + + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA + + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) + + CLOSING_PARENTHESIS + StringUtils.SPACE + DOT; + } + + private static String getFileString(FileDataSource fileDataSource) { + return addQuotes(fileDataSource.getFile().toString()); + } + private static String getIRIString(String string) { + if (string.contains(COLON_UNSPACED) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) + || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { + return addAngleBrackets(string); + } else { + return string; } } - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getConstantName(DatatypeConstant datatypeConstant) { - return addQuote(escape(datatypeConstant.getLexicalValue())) + CARET + CARET + LESS_THAN - + datatypeConstant.getDatatype() + MORE_THAN; + private static String escape(String string) { + return string.replace("\\", "\\\\").replace("\"", "\\\""); + } + + private static String addQuotes(String string) { + return QUOTE + string + QUOTE; + } + + private static String addAngleBrackets(String string) { + return LESS_THAN + string + MORE_THAN; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java index 2fa42eb07..7998dd466 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java @@ -25,6 +25,7 @@ import java.util.Arrays; import org.eclipse.jdt.annotation.NonNull; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** * An {@code CsvFileDataSource} stores facts in the CSV format inside a file of @@ -75,7 +76,7 @@ public String toString() { @Override public String getSyntacticRepresentation() { - return "load-csv(\"" + getFile() + "\") ."; + return Serializer.getString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index ace59318b..8eb8168b6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -27,6 +27,7 @@ import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a @@ -96,15 +97,15 @@ public SparqlQueryResultDataSource(@NonNull final URL endpoint, } public URL getEndpoint() { - return endpoint; + return this.endpoint; } public String getQueryBody() { - return queryBody; + return this.queryBody; } public String getQueryVariables() { - return queryVariables; + return this.queryVariables; } @Override @@ -115,9 +116,9 @@ public final String toConfigString() { DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - "EDB%1$d_param0=" + endpoint + "\n" + "EDB%1$d_param1=" + queryVariables + "\n" + + "EDB%1$d_param0=" + this.endpoint + "\n" + "EDB%1$d_param1=" + this.queryVariables + "\n" + - "EDB%1$d_param2=" + queryBody + "\n"; + "EDB%1$d_param2=" + this.queryBody + "\n"; return configStringPattern; } @@ -138,20 +139,23 @@ static String getQueryVariablesList(LinkedHashSet queryVariables) { public int hashCode() { final int prime = 31; int result = 1; - result = prime * result + endpoint.hashCode(); - result = prime * result + queryBody.hashCode(); - result = prime * result + queryVariables.hashCode(); + result = prime * result + this.endpoint.hashCode(); + result = prime * result + this.queryBody.hashCode(); + result = prime * result + this.queryVariables.hashCode(); return result; } @Override public boolean equals(Object obj) { - if (this == obj) + if (this == obj) { return true; - if (obj == null) + } + if (obj == null) { return false; - if (getClass() != obj.getClass()) + } + if (getClass() != obj.getClass()) { return false; + } final SparqlQueryResultDataSource other = (SparqlQueryResultDataSource) obj; return this.endpoint.equals(other.getEndpoint()) && this.queryVariables.equals(other.getQueryVariables()) && this.queryBody.equals(other.getQueryBody()); @@ -159,13 +163,13 @@ public boolean equals(Object obj) { @Override public String toString() { - return "SparqlQueryResultDataSource [endpoint=" + endpoint + ", queryVariables=" + queryVariables - + ", queryBody=" + queryBody + "]"; + return "SparqlQueryResultDataSource [endpoint=" + this.endpoint + ", queryVariables=" + this.queryVariables + + ", queryBody=" + this.queryBody + "]"; } @Override public String getSyntacticRepresentation() { - return "sparql(" + "<" + endpoint + ">" + ", \"" + queryVariables + "\"" + ", \"" + queryBody + "\") ."; + return Serializer.getString(this); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index 9ef7ef77d..5f6df4244 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -28,6 +28,7 @@ import java.net.MalformedURLException; import java.net.URL; +import org.junit.Ignore; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; @@ -35,29 +36,28 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import 
org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; public class DataSourceDeclarationTest { @Test - public void equalityTest() throws MalformedURLException { - DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + public void testEquality() throws MalformedURLException { + final DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", "?var wdt:P31 wd:Q5 ."); - Predicate predicate1 = Expressions.makePredicate("p", 3); - DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); - DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + final Predicate predicate1 = Expressions.makePredicate("p", 3); + final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); + final DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", "?var wdt:P31 wd:Q5 ."); - Predicate predicate2 = Expressions.makePredicate("p", 3); - DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); + final Predicate predicate2 = Expressions.makePredicate("p", 3); + final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); - DataSource dataSource3 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var2", + final DataSource dataSource3 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var2", "?var2 wdt:P31 wd:Q5 ."); - DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, dataSource3); + final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, dataSource3); - Predicate predicate4 = Expressions.makePredicate("q", 1); - DataSourceDeclaration dataSourceDeclaration4 = new DataSourceDeclarationImpl(predicate4, dataSource2); + final Predicate predicate4 = Expressions.makePredicate("q", 1); + final DataSourceDeclaration dataSourceDeclaration4 = new DataSourceDeclarationImpl(predicate4, dataSource2); assertEquals(dataSourceDeclaration1, dataSourceDeclaration1); assertEquals(dataSourceDeclaration1, dataSourceDeclaration2); @@ -69,24 +69,57 @@ public void equalityTest() throws MalformedURLException { } @Test - public void DataSourceDeclarationToStringTest() throws IOException { - final String csvFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"; - final File unzippedRdfFile = new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"); - Predicate predicate1 = Expressions.makePredicate("p", 3); - Predicate predicate2 = Expressions.makePredicate("q", 1); + public void toString_SparqlQueryResultDataSource() throws IOException { + final Predicate predicate = Expressions.makePredicate("p", 3); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource( new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 ."); - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); - final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); - final 
DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); + + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + assertEquals("@source p(3): sparql(, \"var\", \"?var wdt:P31 wd:Q5 .\") .", + dataSourceDeclaration.toString()); + + } + + @Test + public void toString_CsvFileDataSource() throws IOException { + final Predicate predicate2 = Expressions.makePredicate("q", 1); + final String relativeDirName = "dir"; + final String fileName = "file.csv"; + + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(relativeDirName, fileName)); final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); + + final String expectedFilePath = relativeDirName + File.separator + fileName; + assertEquals("@source q(1): load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration2.toString()); + } + + // TODO: have String representation of files OS independent + @Ignore + @Test + public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throws IOException { + final Predicate predicate2 = Expressions.makePredicate("q", 1); + // "D:\\VLOG\\java-api-applications\\vlog4j\\vlog4j\\vlog4j-core\\src\\test\\data\input\\file.csv"; + final String absoluteFilePathWindows = "D:\\input\\file.csv"; + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(absoluteFilePathWindows)); + final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, + unzippedCsvFileDataSource); + assertEquals("@source q(1): load-csv(\"D:/input/file.csv\") .", + dataSourceDeclaration2.toString()); + } + + @Test + public void toString_RdfFileDataSource_relativePath() throws IOException { + final Predicate predicate2 = Expressions.makePredicate("q", 1); + final String relativeDirName = "dir"; + final String fileName = "file.nt"; + final File unzippedRdfFile = new File(relativeDirName, fileName); + final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, unzippedRdfFileDataSource); - assertEquals("@source p(3): sparql(, \"var\", \"?var wdt:P31 wd:Q5 .\") .", - dataSourceDeclaration1.toString()); - assertEquals("@source q(1): load-csv(\"src/test/data/input/file.csv\") .", dataSourceDeclaration2.toString()); - assertEquals("@source q(1): load-rdf(\"src/test/data/input/file.nt\") .", dataSourceDeclaration3.toString()); + final String expectedFilePath = relativeDirName + File.separator + fileName; + assertEquals("@source q(1): load-rdf(\"" + expectedFilePath + "\") .", + dataSourceDeclaration3.toString()); } } From 94cac64e7c45a12455182fedd253642600694d1d Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 3 Dec 2019 22:49:53 +0100 Subject: [PATCH 0415/1003] Fix statement Entity syntactic representation to end with " ." 
--- .../core/model/implementation/Serializer.java | 113 ++++++++---------- .../implementation/InMemoryDataSource.java | 8 +- .../implementation/RdfFileDataSource.java | 5 +- .../core/model/DataSourceDeclarationTest.java | 21 ++-- .../vlog4j/core/model/FactTest.java | 18 +-- .../vlog4j/core/model/RuleImplTest.java | 6 +- 6 files changed, 76 insertions(+), 95 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 0f9778264..ce1d58990 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,6 +1,5 @@ package org.semanticweb.vlog4j.core.model.implementation; -import org.apache.commons.lang3.StringUtils; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; /*- @@ -50,9 +49,9 @@ * */ public final class Serializer { - public static final String NEGATIVE_IDENTIFIER = "~"; + public static final String STATEMENT_SEPARATOR = " ."; public static final String COMMA = ", "; - public static final String DOT = "."; + public static final String NEGATIVE_IDENTIFIER = "~"; public static final String EXISTENTIAL_IDENTIFIER = "!"; public static final String UNIVERSAL_IDENTIFIER = "?"; public static final String NAMEDNULL_IDENTIFIER = "_"; @@ -64,9 +63,9 @@ public final class Serializer { public static final String CSV_FILE_DATA_SOURCE = "load-csv"; private static final String RDF_FILE_DATA_SOURCE = "load-rdf"; private static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; - public static final String COLON = ": "; - public static final String COLON_UNSPACED = ":"; - public static final String CARET = "^"; + public static final String DATA_SOURCE_SEPARATOR = ": "; + public static final String COLON = ":"; + public static final String DOUBLE_CARET = "^^"; public static final String LESS_THAN = "<"; public static final String MORE_THAN = ">"; public static final String QUOTE = "\""; @@ -88,24 +87,22 @@ private Serializer() { * Creates a String representation of a given {@link Rule}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param rule - * a {@link Rule}. + * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * */ - public static String getString(Rule rule) { - return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + DOT; + public static String getString(final Rule rule) { + return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; } /** * Creates a String representation of a given {@link Conjunction}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param conjunction - * a {@link Conjunction} + * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. */ - public static String getString(Conjunction conjunction) { + public static String getString(final Conjunction conjunction) { final StringBuilder stringBuilder = new StringBuilder(); boolean first = true; for (final Literal literal : conjunction.getLiterals()) { @@ -123,11 +120,10 @@ public static String getString(Conjunction conjunction) { * Creates a String representation of a given {@link Literal}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. 
- * @param literal - * a {@link Literal} + * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ - public static String getString(Literal literal) { + public static String getString(final Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); @@ -151,23 +147,21 @@ public static String getString(Literal literal) { * Creates a String representation of a given {@link Fact}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param fact - * a {@link Fact} + * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ - public static String getFactString(Fact fact) { - return getString(fact) + DOT; + public static String getFactString(final Fact fact) { + return getString(fact) + STATEMENT_SEPARATOR; } /** * Creates a String representation of a given {@link Constant}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param constant - * a {@link Constant} + * @param constant a {@link Constant} * @return String representation corresponding to a given {@link Constant}. */ - public static String getString(AbstractConstant constant) { + public static String getString(final AbstractConstant constant) { return getIRIString(constant.getName()); } @@ -176,12 +170,11 @@ public static String getString(AbstractConstant constant) { * {@link LanguageStringConstant}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param languageStringConstant - * a {@link LanguageStringConstant} + * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. */ - public static String getConstantName(LanguageStringConstant languageStringConstant) { + public static String getConstantName(final LanguageStringConstant languageStringConstant) { return addQuotes(escape(languageStringConstant.getString())) + AT + languageStringConstant.getLanguageTag(); } @@ -190,12 +183,11 @@ public static String getConstantName(LanguageStringConstant languageStringConsta * {@link DatatypeConstant} without an IRI. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param datatypeConstant - * a {@link DatatypeConstant} + * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. */ - public static String getString(DatatypeConstant datatypeConstant) { + public static String getString(final DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { return addQuotes(datatypeConstant.getLexicalValue()); } else { @@ -214,13 +206,12 @@ public static String getString(DatatypeConstant datatypeConstant) { * {@link DatatypeConstant} including an IRI. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param datatypeConstant - * a {@link DatatypeConstant} + * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. 
*/ - public static String getConstantName(DatatypeConstant datatypeConstant) { - return addQuotes(escape(datatypeConstant.getLexicalValue())) + CARET + CARET + public static String getConstantName(final DatatypeConstant datatypeConstant) { + return addQuotes(escape(datatypeConstant.getLexicalValue())) + DOUBLE_CARET + addAngleBrackets(datatypeConstant.getDatatype()); } @@ -228,12 +219,11 @@ public static String getConstantName(DatatypeConstant datatypeConstant) { * Creates a String representation of a given {@link ExistentialVariable}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param existentialVariable - * a {@link ExistentialVariable} + * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. */ - public static String getString(ExistentialVariable existentialVariable) { + public static String getString(final ExistentialVariable existentialVariable) { return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); } @@ -241,12 +231,11 @@ public static String getString(ExistentialVariable existentialVariable) { * Creates a String representation of a given {@link UniversalVariable}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param universalVariable - * a {@link UniversalVariable} + * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. */ - public static String getString(UniversalVariable universalVariable) { + public static String getString(final UniversalVariable universalVariable) { return UNIVERSAL_IDENTIFIER + universalVariable.getName(); } @@ -254,11 +243,10 @@ public static String getString(UniversalVariable universalVariable) { * Creates a String representation of a given {@link NamedNull}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param namedNull - * a {@link NamedNull} + * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ - public static String getString(NamedNull namedNull) { + public static String getString(final NamedNull namedNull) { return NAMEDNULL_IDENTIFIER + namedNull.getName(); } @@ -266,11 +254,10 @@ public static String getString(NamedNull namedNull) { * Creates a String representation of a given {@link Predicate}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param predicate - * a {@link Predicate} + * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ - public static String getString(Predicate predicate) { + public static String getString(final Predicate predicate) { return predicate.getName() + OPEN_PARENTHESIS + predicate.getArity() + CLOSING_PARENTHESIS; } @@ -278,15 +265,13 @@ public static String getString(Predicate predicate) { * Creates a String representation of a given {@link DataSourceDeclaration}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param dataSourceDeclaration - * a {@link DataSourceDeclaration} + * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. 
*/ - public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return DATA_SOURCE + dataSourceDeclaration.getPredicate().getName() + OPEN_PARENTHESIS - + dataSourceDeclaration.getPredicate().getArity() + CLOSING_PARENTHESIS + COLON - + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); + public static String getString(final DataSourceDeclaration dataSourceDeclaration) { + return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR + + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + STATEMENT_SEPARATOR; } /** @@ -298,9 +283,8 @@ public static String getString(DataSourceDeclaration dataSourceDeclaration) { * @return String representation corresponding to a given * {@link CsvFileDataSource}. */ - public static String getString(CsvFileDataSource csvFileDataSource) { - return CSV_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS - + StringUtils.SPACE + DOT; + public static String getString(final CsvFileDataSource csvFileDataSource) { + return CSV_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; } /** @@ -313,9 +297,8 @@ public static String getString(CsvFileDataSource csvFileDataSource) { * @return String representation corresponding to a given * {@link RdfFileDataSource}. */ - public static String getString(RdfFileDataSource rdfFileDataSource) { - return RDF_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS - + StringUtils.SPACE + DOT; + public static String getString(final RdfFileDataSource rdfFileDataSource) { + return RDF_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; } /** @@ -329,19 +312,19 @@ public static String getString(RdfFileDataSource rdfFileDataSource) { * @return String representation corresponding to a given * {@link SparqlQueryResultDataSource}. 
*/ - public static String getString(SparqlQueryResultDataSource dataSource) { + public static String getString(final SparqlQueryResultDataSource dataSource) { return SPARQL_QUERY_RESULT_DATA_SOURCE + OPEN_PARENTHESIS + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) - + CLOSING_PARENTHESIS + StringUtils.SPACE + DOT; + + CLOSING_PARENTHESIS; } - private static String getFileString(FileDataSource fileDataSource) { + private static String getFileString(final FileDataSource fileDataSource) { return addQuotes(fileDataSource.getFile().toString()); } - private static String getIRIString(String string) { - if (string.contains(COLON_UNSPACED) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) + private static String getIRIString(final String string) { + if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { return addAngleBrackets(string); } else { @@ -349,15 +332,15 @@ private static String getIRIString(String string) { } } - private static String escape(String string) { + private static String escape(final String string) { return string.replace("\\", "\\\\").replace("\"", "\\\""); } - private static String addQuotes(String string) { + private static String addQuotes(final String string) { return QUOTE + string + QUOTE; } - private static String addAngleBrackets(String string) { + private static String addAngleBrackets(final String string) { return LESS_THAN + string + MORE_THAN; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index 3bd708525..e498cacf2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -94,15 +94,15 @@ public String[][] getData() { @Override public String getSyntacticRepresentation() { - StringBuilder facts = new StringBuilder( + StringBuilder sb = new StringBuilder( "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); for (int i = 0; i < this.getData().length; i++) { for (int j = 0; j < data[i].length; j++) { - facts.append(data[i][j] + " "); + sb.append(data[i][j] + " "); } - facts.append("\n"); + sb.append("\n"); } - return facts.toString(); + return sb.toString(); } /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java index ee5cc49ee..e65515dc7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java @@ -25,6 +25,7 @@ import java.util.Arrays; import org.eclipse.jdt.annotation.NonNull; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** * An {@code RdfFileDataSource} stores facts in the RDF N-Triples format inside @@ -68,12 +69,12 @@ public RdfFileDataSource(@NonNull final File rdfFile) throws IOException { @Override public String toString() { - return "RdfFileDataSource [rdfFile=" + getFile() + "]"; + 
return "RdfFileDataSource [rdfFile=" + this.getFile() + "]"; } @Override public String getSyntacticRepresentation() { - return "load-rdf(\"" + getFile() + "\") ."; + return Serializer.getString(this); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index 5f6df4244..caf805b82 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -87,39 +87,36 @@ public void toString_CsvFileDataSource() throws IOException { final String fileName = "file.csv"; final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(relativeDirName, fileName)); - final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); final String expectedFilePath = relativeDirName + File.separator + fileName; - assertEquals("@source q(1): load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration2.toString()); + assertEquals("@source q(1): load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); } - // TODO: have String representation of files OS independent + // FIXME: have String representation of files OS independent @Ignore @Test public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throws IOException { - final Predicate predicate2 = Expressions.makePredicate("q", 1); - // "D:\\VLOG\\java-api-applications\\vlog4j\\vlog4j\\vlog4j-core\\src\\test\\data\input\\file.csv"; + final Predicate predicate = Expressions.makePredicate("q", 1); final String absoluteFilePathWindows = "D:\\input\\file.csv"; final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(absoluteFilePathWindows)); - final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedCsvFileDataSource); - assertEquals("@source q(1): load-csv(\"D:/input/file.csv\") .", - dataSourceDeclaration2.toString()); + assertEquals("@source q(1): load-csv(\"D:/input/file.csv\") .", dataSourceDeclaration.toString()); } @Test public void toString_RdfFileDataSource_relativePath() throws IOException { - final Predicate predicate2 = Expressions.makePredicate("q", 1); + final Predicate predicate = Expressions.makePredicate("q", 1); final String relativeDirName = "dir"; final String fileName = "file.nt"; final File unzippedRdfFile = new File(relativeDirName, fileName); final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); - final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); final String expectedFilePath = relativeDirName + File.separator + fileName; - assertEquals("@source q(1): load-rdf(\"" + expectedFilePath + "\") .", - dataSourceDeclaration3.toString()); + assertEquals("@source q(1): load-rdf(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java index a94cdf86d..0de3182d3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java @@ -35,12 +35,12 @@ public class FactTest { @Test public void factsConstructor() { - Predicate p = Expressions.makePredicate("p", 2); - Constant c = Expressions.makeAbstractConstant("c"); - Constant d = Expressions.makeAbstractConstant("d"); - Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); - Fact f2 = Expressions.makeFact("p", Arrays.asList(c, d)); - Fact f3 = new FactImpl(p, Arrays.asList(c, d)); + final Predicate p = Expressions.makePredicate("p", 2); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); + final Fact f2 = Expressions.makeFact("p", Arrays.asList(c, d)); + final Fact f3 = new FactImpl(p, Arrays.asList(c, d)); assertEquals(f1, f2); assertEquals(f1, f3); assertEquals(f2, f3); @@ -48,8 +48,8 @@ public void factsConstructor() { @Test(expected = IllegalArgumentException.class) public void factsOnlyContainConstants() { - Predicate p = Expressions.makePredicate("p", 1); - Variable x = Expressions.makeUniversalVariable("X"); + final Predicate p = Expressions.makePredicate("p", 1); + final Variable x = Expressions.makeUniversalVariable("X"); new FactImpl(p, Arrays.asList(x)); } @@ -59,7 +59,7 @@ public void factToStringTest() { final Constant c = Expressions.makeAbstractConstant("c"); final Constant d = Expressions.makeAbstractConstant("d"); final Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); - assertEquals("p(c, d).", f1.toString()); + assertEquals("p(c, d) .", f1.toString()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index 461a439f1..0a406ec18 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -152,7 +152,7 @@ public void ruleToStringTest() { final Variable y2 = Expressions.makeUniversalVariable("Y"); final Constant d = Expressions.makeAbstractConstant("d"); final Constant c = Expressions.makeAbstractConstant("c"); - LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); + final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, z); final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, y); @@ -168,8 +168,8 @@ public void ruleToStringTest() { final Conjunction bodyConjunction = new ConjunctionImpl<>(LiteralList); final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); - assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z).", rule1.toString()); - assertEquals("q(?X, !Y) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en).", rule2.toString()); + assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z) .", rule1.toString()); + assertEquals("q(?X, !Y) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en) .", rule2.toString()); } From 507d19475e4ccacab180be660dab9ab33c6595dd Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" 
Date: Tue, 3 Dec 2019 23:22:48 +0100 Subject: [PATCH 0416/1003] change vlog4j-base dependency version to release 1.3.2 --- vlog4j-core/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index 20e9568cb..a1c646e97 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -17,7 +17,7 @@ Core components of VLog4j: reasoner and model - 1.3.1 + 1.3.2 From ee7e9f86814465a6381160e48f133f4045b5ad37 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 11:39:30 +0100 Subject: [PATCH 0417/1003] Don't build on trusty --- .travis.yml | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1b1b9a53f..5921162f9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,20 +6,6 @@ matrix: jdk: - openjdk11 - - os: linux - dist: trusty - addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - gcc-6 - - g++-6 - - libstdc++6 - jdk: - - openjdk8 - env: CC=gcc-6 CXX=g++-6 - - os: linux dist: xenial addons: @@ -49,7 +35,6 @@ install: mvn install $OPTIONS -DskipTests=true after_success: - mvn clean test jacoco:report coveralls:report -dist: bionic sudo: false cache: From efd048296b468c715d0cb2c03319d6dbfd8cbc9c Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 11:40:17 +0100 Subject: [PATCH 0418/1003] Bump macOS to 10.14 --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 5921162f9..fde2269c2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,6 +19,7 @@ matrix: env: CC=gcc-6 CXX=g++-6 - os: osx + osx_image: xcode10.2 jobs: From a0ad8f25a3348e510ff1db70a261d4b87d77e205 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 4 Dec 2019 14:43:04 +0100 Subject: [PATCH 0419/1003] Update RELEASE-NOTES before release --- RELEASE-NOTES.md | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 049a8e72e..0819972b3 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -8,18 +8,24 @@ Breaking changes: * The data model for rules has been refined and changed: * Instead of Constant, specific types of constants are used to capture abtract and data values * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification - * Bland was renamed to NamedNull to avoid confusion with RDF blank nodes + * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes * Methods to access terms now use Java Streams and are unified across syntactic objects New features: -* ... 
+* New module vlog4j-client provides a stand-alone command line client jar for VLog4j
+* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki
Other improvements:
* Data model is better aligned with syntax supported by parser
+* Java object Statements (rules, facts, datasource declarations) String representation is parseable
+* OWL API dependency has been upgraded from 4.5.1 to latest (5.1.11)
+* SLF4J dependency has been upgraded from 1.7.10 to latest (1.7.28)
+* Cobertura test coverage tool has been replaced by JaCoCo
Bugfixes:
* Acyclicity checks work again without calling reason() first (issue #128)
-
+* in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104)
+* in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20)
VLog4j v0.4.0
-------------
From 15e16140fb3d4d8f0e178df209da5e5ba231b3b4 Mon Sep 17 00:00:00 2001
From: Irina Dragoste
Date: Wed, 4 Dec 2019 14:55:47 +0100
Subject: [PATCH 0420/1003] Update README
* vlog4j-client module
* link to wiki
---
README.md | 2 ++
1 file changed, 2 insertions(+)
diff --git a/README.md b/README.md
index a52fb12f7..11f0be918 100644
--- a/README.md
+++ b/README.md
@@ -26,6 +26,7 @@ You need to use Java 1.8 or above.
Available modules include:
* **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files
* **vlog4j-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning)
* **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API
+* **vlog4j-client** stand-alone application that builds a command-line client for VLog4j
The released packages use vlog4j-base, which packages system-dependent binaries for Linux, MacOS, and Windows, and should work out of the box with current versions of these systems. In case of problems, or if you are using the current development version, own binaries can be compiled as follows:
@@ -38,6 +39,7 @@ Documentation
* The module **vlog4j-examples** includes short example programs that demonstrate various features and use cases
* The GitHub project **[VLog4j Example](https://github.com/knowsys/vlog4j-example)** shows how to use VLog4j in own Maven projects and can be used as a skeleton for own projects
* [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages.
+* VLog4j[Wiki](https://github.com/knowsys/vlog4j/wiki) is available online
Development
-----------
From 990f2fed89a42076b99ecea3d095c8c8412c9b76 Mon Sep 17 00:00:00 2001
From: Irina Dragoste
Date: Wed, 4 Dec 2019 14:56:57 +0100
Subject: [PATCH 0421/1003] Update README
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 11f0be918..943612a18 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,7 @@ You need to use Java 1.8 or above.
Available modules include: * **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files * **vlog4j-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API -* **vlog4j-client** stand-alone application that builds a command-line client for VLog4j +* **vlog4j-client**: stand-alone application that builds a command-line client for VLog4j The released packages use vlog4j-base, which packages system-dependent binaries for Linux, MacOS, and Windows, and should work out of the box with current versions of these systems. In case of problems, or if you are using the current development version, own binaries can be compiled as follows: @@ -39,7 +39,7 @@ Documentation * The module **vlog4j-examples** includes short example programs that demonstrate various features and use cases * The GitHub project **[VLog4j Example](https://github.com/knowsys/vlog4j-example)** shows how to use VLog4j in own Maven projects and can be used as a skeleton for own projects * [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. -* VLog4j[Wiki](https://github.com/knowsys/vlog4j/wiki) is available online +* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online. Development ----------- From ed11e8c0dfaee02b978b30dece51dc04ed235abc Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 4 Dec 2019 15:09:09 +0100 Subject: [PATCH 0422/1003] update README with description about wiki --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 943612a18..19d2290a2 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ You need to use Java 1.8 or above. Available modules include: * **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files * **vlog4j-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API -* **vlog4j-client**: stand-alone application that builds a command-line client for VLog4j +* **vlog4j-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/vlog4j/wiki/Standalone-client) for VLog4j. The released packages use vlog4j-base, which packages system-dependent binaries for Linux, MacOS, and Windows, and should work out of the box with current versions of these systems. In case of problems, or if you are using the current development version, own binaries can be compiled as follows: @@ -39,7 +39,7 @@ Documentation * The module **vlog4j-examples** includes short example programs that demonstrate various features and use cases * The GitHub project **[VLog4j Example](https://github.com/knowsys/vlog4j-example)** shows how to use VLog4j in own Maven projects and can be used as a skeleton for own projects * [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. -* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online. 
+* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online, with detailed information about vlog4j usage, the supported rule language, and related publications. Development ----------- From bce7689ea8743bbb82f808d3f7d809b802193839 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 1 Oct 2019 17:46:23 +0200 Subject: [PATCH 0423/1003] Parser: add generated source files to project This fixes, among other things, autocompletion and building the project using the eclipse LSP server. --- vlog4j-parser/pom.xml | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index fa5b82280..6e3773633 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -44,6 +44,24 @@ + + org.codehaus.mojo + build-helper-maven-plugin + 1.8 + + + generate-sources + + + ${project.build.directory}/generated-sources/javacc/ + + + + add-source + + + + @@ -69,6 +87,27 @@ + + + org.codehaus.mojo + build-helper-maven-plugin + [1.0,) + + parse-version + add-source + maven-version + add-resource + add-test-resource + add-test-source + + + + + true + true + + + From 5b866be608eba641177ee6f02368e3dccef6639f Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 2 Oct 2019 14:57:57 +0200 Subject: [PATCH 0424/1003] Parser: split out tests relating to data sources --- .../parser/RuleParserDataSourceTest.java | 87 +++++++++++++++++++ .../vlog4j/syntax/parser/RuleParserTest.java | 57 +----------- 2 files changed, 89 insertions(+), 55 deletions(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java new file mode 100644 index 000000000..22700a0b0 --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -0,0 +1,87 @@ +package org.semanticweb.vlog4j.syntax.parser; + +/*- + * #%L + * VLog4j Syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Arrays; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; + +public class RuleParserDataSourceTest { + @Test + public void testCsvSource() throws ParsingException, IOException { + String input = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") ."; + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); + Predicate p = Expressions.makePredicate("p", 2); + DataSourceDeclaration d = new DataSourceDeclarationImpl(p, csvds); + assertEquals(Arrays.asList(d), statements); + } + + @Test + public void testRdfSource() throws ParsingException, IOException { + String input = "@source p(3) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); + Predicate p = Expressions.makePredicate("p", 3); + DataSourceDeclaration d = new DataSourceDeclarationImpl(p, rdfds); + assertEquals(Arrays.asList(d), statements); + } + + @Test(expected = ParsingException.class) + public void testRdfSourceInvalidArity() throws ParsingException, IOException { + String input = "@source p(2) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + RuleParser.parse(input); + } + + @Test + public void testSparqlSource() throws ParsingException, MalformedURLException { + String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( + new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); + Predicate p = Expressions.makePredicate("p", 2); + DataSourceDeclaration d = new DataSourceDeclarationImpl(p, sparqlds); + assertEquals(Arrays.asList(d), statements); + } + + @Test(expected = ParsingException.class) + public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { + String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parse(input); + } +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 8ba1fb964..6abc8647a 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -9,9 +9,9 @@ * Licensed 
under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,31 +21,21 @@ */ import static org.junit.Assert.assertEquals; -import java.io.File; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -351,49 +341,6 @@ public void tesLiteralError() throws ParsingException { RuleParser.parseLiteral(input); } - @Test - public void testCsvSource() throws ParsingException, IOException { - String input = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); - Predicate p = Expressions.makePredicate("p", 2); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, csvds); - assertEquals(Arrays.asList(d), statements); - } - - @Test - public void testRdfSource() throws ParsingException, IOException { - String input = "@source p(3) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); - Predicate p = Expressions.makePredicate("p", 3); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, rdfds); - assertEquals(Arrays.asList(d), statements); - } - - @Test(expected = ParsingException.class) - public void testRdfSourceInvalidArity() throws ParsingException, IOException { - String input = "@source p(2) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - RuleParser.parse(input); - } - - @Test - public void testSparqlSource() throws ParsingException, MalformedURLException { - String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - ArrayList statements = new 
ArrayList<>(RuleParser.parse(input).getStatements()); - SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( - new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); - Predicate p = Expressions.makePredicate("p", 2); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, sparqlds); - assertEquals(Arrays.asList(d), statements); - } - - @Test(expected = ParsingException.class) - public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { - String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - RuleParser.parse(input); - } - @Test(expected = ParsingException.class) public void testBlankPrefixDeclaration() throws ParsingException { String input = "@prefix _: . s(c) ."; From 1e6e3dbe25c369587b8827f72da7e405aa58ed22 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 4 Oct 2019 00:26:52 +0200 Subject: [PATCH 0425/1003] Parser: allow registration of custom data sources --- .../parser/DataSourceDeclarationHandler.java | 43 ++++++++++ .../vlog4j/parser/ParserConfiguration.java | 82 +++++++++++++++++++ .../semanticweb/vlog4j/parser/RuleParser.java | 31 +++++++ .../vlog4j/parser/javacc/JavaCCParser.jj | 44 +++++++--- .../parser/javacc/JavaCCParserBase.java | 38 +++++++-- .../parser/RuleParserDataSourceTest.java | 27 ++++++ 6 files changed, 249 insertions(+), 16 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java new file mode 100644 index 000000000..70a02b8de --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -0,0 +1,43 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + + +import org.semanticweb.vlog4j.core.model.api.DataSource; + +/** + * Handler for parsing a custom Data Source declaration. + * + * @author Maximilian Marx + */ +@FunctionalInterface +public interface DataSourceDeclarationHandler { + /** + * Parse a Data Source Declaration. + * + * @param arguments + * Arguments given to the Data Source declaration. + * + * @throws ParsingException when the given arguments are invalid for the Data Source. + * @return DataSource a DataSource instance. 
+ */ + DataSource handleDeclaration(String[] arguments) throws ParsingException; +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java new file mode 100644 index 000000000..2b2dbb66f --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -0,0 +1,82 @@ +package org.semanticweb.vlog4j.parser; + +import java.util.HashMap; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.parser.javacc.ParseException; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Class to keep parser configuration. + * + * @author Maximilian Marx + */ +public class ParserConfiguration { + /** + * Register a new Data Source. + * + * @param name + * Name of the data source, as it appears in the declaring directive. + * + * @param handler + * Handler for parsing a data source declaration. + * + * @throws IllegalArgumentException if the provided name is already registered. + * @return this + */ + public ParserConfiguration registerDataSource(String name, DataSourceDeclarationHandler handler) throws IllegalArgumentException { + if (dataSources.containsKey(name)) { + throw new IllegalArgumentException("Data source \"" + name + "\" is already registered."); + } + + this.dataSources.put(name, handler); + return this; + } + + /** + * Parse a Data Source declaration. + * + * @param name + * Name of the data source. + * + * @param args + * arguments given in the data source declaration. + * + * @throws ParsingException when the declaration is invalid, e.g., if the Data Source is not known. + * + * @return the Data Source instance. + */ + public DataSource parseDataSourceDeclaration(String name, String[] args) throws ParsingException { + DataSourceDeclarationHandler handler = dataSources.get(name); + + if (handler == null) { + throw new ParsingException("Data source \"" + name + "\" is not known."); + } + + return handler.handleDeclaration(args); + } + + /** + * The registered data sources. 
+ */ + HashMap dataSources = new HashMap<>(); +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index ffd6ec8cf..129537296 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -46,6 +46,22 @@ public class RuleParser { private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, final ParserConfiguration parserConfiguration) throws ParsingException { + final JavaCCParser parser = new JavaCCParser(stream, encoding); + parser.setKnowledgeBase(knowledgeBase); + parser.setParserConfiguration(parserConfiguration); + doParse(parser); + } + + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { + parseInto(knowledgeBase, stream, "UTF-8", parserConfiguration); + } + + public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + parseInto(knowledgeBase, inputStream, "UTF-8", parserConfiguration); + } + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding) throws ParsingException { final JavaCCParser javaCcParser = new JavaCCParser(stream, encoding); @@ -62,6 +78,21 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final String inp parseInto(knowledgeBase, inputStream, "UTF-8"); } + public static KnowledgeBase parse(final InputStream stream, final String encoding, final ParserConfiguration parserConfiguration) throws ParsingException { + JavaCCParser parser = new JavaCCParser(stream, encoding); + parser.setParserConfiguration(parserConfiguration); + return doParse(parser); + } + + public static KnowledgeBase parse(final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { + return parse(stream, "UTF-8", parserConfiguration); + } + + public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + return parse(inputStream, "UTF-8", parserConfiguration); + } + public static KnowledgeBase parse(final InputStream stream, final String encoding) throws ParsingException { return doParse(new JavaCCParser(stream, encoding)); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 9c3785d20..e58c47b5b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -21,6 +21,7 @@ import java.net.MalformedURLException; import java.util.List; import java.util.ArrayList; +import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; @@ -109,11 +110,13 @@ DataSource dataSource() throws PrefixDeclarationException: String endpoint; String variables; String 
query; + Token sourceName; + String[] arguments; } { < LOADCSV > < LPAREN > fileName = String() < RPAREN > { - try { + try { return new CsvFileDataSource(new File(fileName)) ; } catch (IOException e) { throw new ParseException("Could not use source file \"" + fileName +"\": " + e.getMessage()); @@ -134,7 +137,15 @@ DataSource dataSource() throws PrefixDeclarationException: } catch (MalformedURLException e) { throw new ParseException("SPARQL endoint \"" + endpoint +"\" is not a valid URL: " + e.getMessage()); } - } + } +| sourceName = < DIRECTIVENAME > < LPAREN > arguments = Strings() < RPAREN > + { + try { + return parserConfiguration.parseDataSourceDeclaration(sourceName.image, arguments); + } catch (ParsingException e) { + throw new ParseException(e.getMessage()); + } + } } @@ -161,13 +172,13 @@ Rule rule() throws PrefixDeclarationException: { // check that the intersection between headExiVars and BodyVars is empty for (String variable : headExiVars) { - if (bodyVars.contains(variable)) + if (bodyVars.contains(variable)) throw new ParseException("Malformed rule " + head + " :- " + body + "\nExistential variable " + variable + " also used in rule body."); } // check that bodyVars contains headUniVars for (String variable : headUniVars) { - if (!bodyVars.contains(variable)) + if (!bodyVars.contains(variable)) throw new ParseException("Unsafe rule " + head + " :- " + body + "\nUniversal variable " + variable + " occurs in head but not in body."); } @@ -226,7 +237,7 @@ Fact fact(FormulaContext context) throws PrefixDeclarationException: { predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { - try { + try { return Expressions.makeFact(predicateName, terms); } catch (IllegalArgumentException e) { throw new ParseException("Error parsing fact: " + e.toString()); @@ -352,6 +363,18 @@ String String(): } } +String[] Strings(): +{ + String str; + String[] rest = {}; +} +{ + str = String() [< COMMA > rest = Strings()] + { + return collectStrings(str, rest); + } +} + String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: { String iri; @@ -428,7 +451,7 @@ TOKEN: ( (< DIGITS >)+ "." (< DIGITS >)* | "." (< DIGITS >)+ - ) + ) > | < DOUBLE : ([ "+", "-" ])? @@ -436,7 +459,7 @@ TOKEN: ([ "0"-"9" ])+ "." ([ "0"-"9" ])* < EXPONENT > | "." ([ "0"-"9" ])+ (< EXPONENT >) | ([ "0"-"9" ])+ < EXPONENT > - ) + ) > | < #DIGITS : ([ "0"-"9" ])+ > | < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? ([ "0"-"9" ])+ > @@ -508,6 +531,8 @@ TOKEN : | < VARORPREDNAME : < A2Z> (< A2ZN >)* > | < #A2Z : [ "a"-"z", "A"-"Z" ] > | < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > +| < DIRECTIVENAME : < A2Z > (< A2ZNX >)* > +| < #A2ZNX : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > } TOKEN : @@ -539,10 +564,10 @@ TOKEN : | [ "\u2070"-"\u218f" ] | [ "\u2c00"-"\u2fef" ] | [ "\u3001"-"\ud7ff" ] - | [ "\uf900"-"\ufffd" ] + | [ "\uf900"-"\ufffd" ] > // | [ ""#x10000-#xEFFFF] -| +| < #PN_CHARS_U : < PN_CHARS_BASE > | "_" > @@ -579,4 +604,3 @@ TOKEN : < PN_CHARS > )? 
> } - diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index cfcfcd96a..7f0929e5f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.parser.javacc; +import java.util.ArrayList; + /*- * #%L * vlog4j-parser @@ -9,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,6 +23,7 @@ */ import java.util.HashSet; +import java.util.List; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; @@ -31,17 +34,18 @@ import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; +import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.core.model.api.Predicate; /** * Basic methods used in the JavaCC-generated parser. - * + * * Implementation of some string escaping methods adapted from Apache Jena, * released under Apache 2.0 license terms. - * + * * @see https://github.com/apache/jena/blob/master/jena-core/src/main/java/org/apache/jena/n3/turtle/ParserBase.java - * + * * @author Markus Kroetzsch * @author Larry Gonzalez * @author Jena developers, Apache Software Foundation (ASF) @@ -51,6 +55,7 @@ public class JavaCCParserBase { final PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); KnowledgeBase knowledgeBase; + ParserConfiguration parserConfiguration; /** * "Local" variable to remember (universal) body variables during parsing. @@ -67,7 +72,7 @@ public class JavaCCParserBase { /** * Defines the context for parsing sub-formulas. 
- * + * * @author Markus Kroetzsch * */ @@ -86,6 +91,7 @@ public enum FormulaContext { public JavaCCParserBase() { this.knowledgeBase = new KnowledgeBase(); + this.parserConfiguration = new ParserConfiguration(); } Constant createIntegerConstant(String lexicalForm) { @@ -105,6 +111,17 @@ void addDataSource(String predicateName, int arity, DataSource dataSource) { knowledgeBase.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } + static String[] collectStrings(String str, String[] rest) { + ArrayList strings = new ArrayList<>(); + strings.add(str); + + for (String next : rest) { + strings.add(next); + } + + return strings.toArray(rest); + } + static String unescapeStr(String s, int line, int column) throws ParseException { return unescape(s, '\\', false, line, column); } @@ -230,4 +247,13 @@ public KnowledgeBase getKnowledgeBase() { return knowledgeBase; } + public void setParserConfiguration(ParserConfiguration parserConfiguration) { + this.parserConfiguration = parserConfiguration; + } + + public ParserConfiguration getParserConfiguration() { + return parserConfiguration; + } + + } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 22700a0b0..54f17c95a 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -21,6 +21,7 @@ */ import static org.junit.Assert.*; +import static org.mockito.Mockito.*; import java.io.File; import java.io.IOException; @@ -29,7 +30,10 @@ import java.util.ArrayList; import java.util.Arrays; +import javax.sql.DataSource; + import org.junit.Test; +import org.mockito.Matchers; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Statement; @@ -38,6 +42,8 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -84,4 +90,25 @@ public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURL String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; RuleParser.parse(input); } + + @Test(expected = ParsingException.class) + public void testUnknownDataSource() throws ParsingException { + String input = "@source p(2) : unknown-data-source(\"hello, world\") ."; + RuleParser.parse(input); + } + + @Test + public void testCustomDataSource() throws ParsingException { + CsvFileDataSource source = mock(CsvFileDataSource.class); + DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDataSource("mock-source", handler); + doReturn(source).when(handler).handleDeclaration(Matchers.any()); + + String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; + String[] expectedArguments = {"hello", "world"}; + RuleParser.parse(input, 
parserConfiguration); + + verify(handler).handleDeclaration(eq(expectedArguments)); + } } From 5684e8dbd6e1930d0e4e0cf5c6463b1e8f904555 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 11 Nov 2019 19:57:09 +0100 Subject: [PATCH 0426/1003] Bump mockito to 2.28.2 --- pom.xml | 2 +- .../vlog4j/syntax/parser/RuleParserDataSourceTest.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 98b1b8ce1..07c9e2e46 100644 --- a/pom.xml +++ b/pom.xml @@ -64,7 +64,7 @@ UTF-8 2.1.100 4.12 - 1.10.19 + 2.28.2 1.7.28 3.9 1.5 diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 54f17c95a..8f2a3116a 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -33,7 +33,7 @@ import javax.sql.DataSource; import org.junit.Test; -import org.mockito.Matchers; +import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Statement; @@ -103,7 +103,7 @@ public void testCustomDataSource() throws ParsingException { DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDeclaration(Matchers.any()); + doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.any()); String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; String[] expectedArguments = {"hello", "world"}; From a4ce824ac9a1a8d8f956fad884c50686163908e6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 12 Nov 2019 12:09:35 +0100 Subject: [PATCH 0427/1003] Parser: Add handlers for CSV/RDF/SPARQL data source declarations --- .../vlog4j/core/model/api/DataSource.java | 90 +- .../implementation/RdfFileDataSource.java | 8 +- .../parser/DataSourceDeclarationHandler.java | 41 +- .../vlog4j/parser/ParserConfiguration.java | 114 +- .../CsvFileDataSourceDeclarationHandler.java | 51 + .../RdfFileDataSourceDeclarationHandler.java | 51 + ...eryResultDataSourceDeclarationHandler.java | 67 + .../vlog4j/parser/javacc/JavaCCParser.jj | 1187 ++++++++--------- .../parser/javacc/JavaCCParserBase.java | 32 +- .../parser/javacc/SubParserFactory.java | 78 ++ .../parser/RuleParserDataSourceTest.java | 39 +- 11 files changed, 1020 insertions(+), 738 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java index d085716e6..504603d71 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java @@ -1,39 +1,51 @@ -package org.semanticweb.vlog4j.core.model.api; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * Interfaces various types of data sources for storing facts. - * - * @author Irina Dragoste - * - */ -public interface DataSource extends Entity { - - /** - * Constructs a String representation of the data source. - * - * @return a String representation of the data source configuration for a - * certain predicate. - */ - public String toConfigString(); - -} +package org.semanticweb.vlog4j.core.model.api; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Optional; + +/** + * Interfaces various types of data sources for storing facts. + * + * @author Irina Dragoste + * + */ +public interface DataSource extends Entity { + + /** + * Constructs a String representation of the data source. + * + * @return a String representation of the data source configuration for a + * certain predicate. + */ + public String toConfigString(); + + /** + * Retrieve the required arity of target predicates for the data source. + * + * @return the required arity for the data source, or Optional.empty() if there + * is none. + */ + public default Optional getRequiredArity() { + return Optional.empty(); + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java index e65515dc7..eb3ce09ea 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java @@ -23,8 +23,8 @@ import java.io.File; import java.io.IOException; import java.util.Arrays; +import java.util.Optional; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** @@ -63,7 +63,7 @@ public class RdfFileDataSource extends FileDataSource { * {@code rdfFile} does not occur in * {@link #possibleExtensions}. 
*/ - public RdfFileDataSource(@NonNull final File rdfFile) throws IOException { + public RdfFileDataSource(final File rdfFile) throws IOException { super(rdfFile, possibleExtensions); } @@ -77,4 +77,8 @@ public String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + public Optional getRequiredArity() { + return Optional.of(3); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index 70a02b8de..d1f8766b1 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -20,8 +20,10 @@ * #L% */ +import java.util.List; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** * Handler for parsing a custom Data Source declaration. @@ -30,14 +32,33 @@ */ @FunctionalInterface public interface DataSourceDeclarationHandler { - /** - * Parse a Data Source Declaration. - * - * @param arguments - * Arguments given to the Data Source declaration. - * - * @throws ParsingException when the given arguments are invalid for the Data Source. - * @return DataSource a DataSource instance. - */ - DataSource handleDeclaration(String[] arguments) throws ParsingException; + /** + * Parse a Data Source Declaration. + * + * @param arguments Arguments given to the Data Source declaration. + * @param subParserFactory a factory for obtaining a SubParser, sharing the + * parser's state, but bound to new input. + * + * @throws ParsingException when the given arity or arguments are invalid for + * the Data Source. + * @return a @{link DataSource} instance corresponding to the given arguments. + */ + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + throws ParsingException; + + /** + * Validate the provided number of arguments to the data source. + * + * @param arguments Arguments given to the Data Source declaration. + * @param number expected number of arguments + * + * @throws ParsingException when the given number of Arguments is invalid for + * the Data Source. 
+ */ + static void verifyCorrectNumberOfArguments(List arguments, int number) throws ParsingException { + if (arguments.size() != number) { + throw new ParsingException("Invalid number of arguments " + arguments.size() + + " for Data Source declaration, expected " + number); + } + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 2b2dbb66f..e810aa930 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -1,10 +1,5 @@ package org.semanticweb.vlog4j.parser; -import java.util.HashMap; - -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.parser.javacc.ParseException; - /*- * #%L * vlog4j-parser @@ -25,58 +20,81 @@ * #L% */ +import java.util.HashMap; +import java.util.List; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + /** * Class to keep parser configuration. * * @author Maximilian Marx */ public class ParserConfiguration { - /** - * Register a new Data Source. - * - * @param name - * Name of the data source, as it appears in the declaring directive. - * - * @param handler - * Handler for parsing a data source declaration. - * - * @throws IllegalArgumentException if the provided name is already registered. - * @return this - */ - public ParserConfiguration registerDataSource(String name, DataSourceDeclarationHandler handler) throws IllegalArgumentException { - if (dataSources.containsKey(name)) { - throw new IllegalArgumentException("Data source \"" + name + "\" is already registered."); - } + public ParserConfiguration() { + registerDefaultDataSources(); + } + + /** + * Register a new Data Source. + * + * @param name Name of the data source, as it appears in the declaring + * directive. + * @param handler Handler for parsing a data source declaration. + * + * @throws IllegalArgumentException if the provided name is already registered. + * @return this + */ + public ParserConfiguration registerDataSource(String name, DataSourceDeclarationHandler handler) + throws IllegalArgumentException { + if (dataSources.containsKey(name)) { + throw new IllegalArgumentException("Data source \"" + name + "\" is already registered."); + } + + this.dataSources.put(name, handler); + return this; + } + + /** + * Parse a Data Source declaration. + * + * @param name Name of the data source. + * @param args arguments given in the data source declaration. + * @param subParserFactory a {@link SubParserFactory} instance that creates + * parser with the same context as the current parser. + * + * @throws ParsingException when the declaration is invalid, e.g., if the Data + * Source is not known. + * + * @return the Data Source instance. 
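With the new contract above, a handler receives the raw argument list plus a SubParserFactory and can use the static verifyCorrectNumberOfArguments helper for the usual arity check. A minimal sketch of a custom handler built on that contract; the class name, the base-directory idea and the directive it would serve are invented for illustration, only the vlog4j types are taken from the patch:

```
import java.io.File;
import java.io.IOException;
import java.util.List;

import org.semanticweb.vlog4j.core.model.api.DataSource;
import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource;
import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler;
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.javacc.SubParserFactory;

/**
 * Illustrative handler that resolves a single file-name argument against a
 * fixed base directory and loads it as a CSV source.
 */
public class DirectoryCsvSourceDeclarationHandler implements DataSourceDeclarationHandler {
    private final File baseDirectory;

    public DirectoryCsvSourceDeclarationHandler(final File baseDirectory) {
        this.baseDirectory = baseDirectory;
    }

    @Override
    public DataSource handleDeclaration(final List<String> arguments, final SubParserFactory subParserFactory)
            throws ParsingException {
        // Exactly one argument: the file name relative to the base directory.
        DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 1);
        final String fileName = arguments.get(0);

        try {
            return new CsvFileDataSource(new File(baseDirectory, fileName));
        } catch (IOException e) {
            throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e);
        }
    }
}
```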
+ */ + public DataSource parseDataSourceDeclaration(String name, List args, + final SubParserFactory subParserFactory) throws ParsingException { + DataSourceDeclarationHandler handler = dataSources.get(name); - this.dataSources.put(name, handler); - return this; - } + if (handler == null) { + throw new ParsingException("Data source \"" + name + "\" is not known."); + } - /** - * Parse a Data Source declaration. - * - * @param name - * Name of the data source. - * - * @param args - * arguments given in the data source declaration. - * - * @throws ParsingException when the declaration is invalid, e.g., if the Data Source is not known. - * - * @return the Data Source instance. - */ - public DataSource parseDataSourceDeclaration(String name, String[] args) throws ParsingException { - DataSourceDeclarationHandler handler = dataSources.get(name); + return handler.handleDeclaration(args, subParserFactory); + } - if (handler == null) { - throw new ParsingException("Data source \"" + name + "\" is not known."); - } + /** + * Register built-in data sources (currently CSV, RDF, SPARQL). + */ + private void registerDefaultDataSources() { + registerDataSource("load-csv", new CsvFileDataSourceDeclarationHandler()); + registerDataSource("load-rdf", new RdfFileDataSourceDeclarationHandler()); + registerDataSource("sparql", new SparqlQueryResultDataSourceDeclarationHandler()); + } - return handler.handleDeclaration(args); - } - /** - * The registered data sources. - */ - HashMap dataSources = new HashMap<>(); + /** + * The registered data sources. + */ + private HashMap dataSources = new HashMap<>(); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java new file mode 100644 index 000000000..dc640dff5 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -0,0 +1,51 @@ +package org.semanticweb.vlog4j.parser.datasources; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
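To make such a handler reachable from a rules file, it is registered on a ParserConfiguration under the directive name it should answer to, and that configuration is handed to the parser. A sketch using the illustrative DirectoryCsvSourceDeclarationHandler from above; the directive name csv-in-directory, the /data path and facts.csv are placeholders:

```
import java.io.File;

import org.semanticweb.vlog4j.parser.ParserConfiguration;
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.RuleParser;

public class RegisterCustomSourceExample {
    public static void main(String[] args) throws ParsingException {
        final ParserConfiguration parserConfiguration = new ParserConfiguration();
        parserConfiguration.registerDataSource("csv-in-directory",
                new DirectoryCsvSourceDeclarationHandler(new File("/data")));

        // The @source declaration is now dispatched to the registered handler.
        final String input = "@source p(2) : csv-in-directory(\"facts.csv\") .";
        RuleParser.parse(input, parserConfiguration);
    }
}
```

Registering the same name twice raises an IllegalArgumentException, so directive names form a small namespace owned by the configuration.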
+ * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing {@link CsvFileDataSource} declarations + * + * @author Maximilian Marx + */ +public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + @Override + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + throws ParsingException { + DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 1); + String fileName = arguments.get(0); + + try { + return new CsvFileDataSource(new File(fileName)); + } catch (IOException e) { + throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java new file mode 100644 index 000000000..29714b972 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -0,0 +1,51 @@ +package org.semanticweb.vlog4j.parser.datasources; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
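This handler is what the built-in load-csv directive now dispatches to, so the existing @source syntax keeps working unchanged. A small sketch; facts.csv is a placeholder and would have to be a readable CSV file for the resulting knowledge base to be usable:

```
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.RuleParser;

public class LoadCsvDirectiveExample {
    public static void main(String[] args) throws ParsingException {
        // Parsed by CsvFileDataSourceDeclarationHandler, registered under "load-csv".
        final String input = "@source p(2) : load-csv(\"facts.csv\") .";
        RuleParser.parse(input);
    }
}
```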
+ * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing {@link RdfFileDataSource} declarations + * + * @author Maximilian Marx + */ +public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + @Override + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + throws ParsingException { + DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 1); + String fileName = arguments.get(0); + + try { + return new RdfFileDataSource(new File(fileName)); + } catch (IOException e) { + throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java new file mode 100644 index 000000000..1faff9341 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -0,0 +1,67 @@ +package org.semanticweb.vlog4j.parser.datasources; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
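Unlike the CSV case, the RDF handler produces a data source whose getRequiredArity() is 3, which the arity check added to JavaCCParserBase.addDataSource later in this patch enforces at parse time. A sketch of the failing case only; triples.nt is a placeholder file name:

```
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.RuleParser;

public class LoadRdfArityExample {
    public static void main(String[] args) {
        try {
            // RDF triples need a ternary predicate, so arity 2 is rejected.
            RuleParser.parse("@source pair(2) : load-rdf(\"triples.nt\") .");
        } catch (ParsingException e) {
            System.out.println("Declaration rejected: " + e.getMessage());
        }
    }
}
```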
+ * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.List; + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; +import org.semanticweb.vlog4j.parser.javacc.ParseException; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing {@link SparqlQueryResultDataSource} declarations + * + * @author Maximilian Marx + */ +public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + @Override + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + throws ParsingException { + DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 3); + + String endpoint = arguments.get(0); + try { + JavaCCParser parser = subParserFactory.makeSubParser(endpoint); + endpoint = parser.IRI(false); + } catch (ParseException | PrefixDeclarationException e) { + throw new ParsingException(e); + } + + String variables = arguments.get(1); + String query = arguments.get(2); + + try { + return new SparqlQueryResultDataSource(new URL(endpoint), variables, query); + } catch (MalformedURLException e) { + throw new ParsingException("SPARQL endpoint \"" + endpoint + "\" is not a valid URL: " + e.getMessage(), e); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index e58c47b5b..022a1815a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -1,606 +1,581 @@ -options -{ - // Use \ u escapes in streams AND use a reader for the query - // => get both raw and escaped unicode - JAVA_UNICODE_ESCAPE = true; - UNICODE_INPUT = false; - - STATIC = false; - // DEBUG_PARSER = true; - // DEBUG_TOKEN_MANAGER = true ; -} - -PARSER_BEGIN(JavaCCParser) -package org.semanticweb.vlog4j.parser.javacc; - -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.net.MalformedURLException; - -import java.util.List; -import java.util.ArrayList; - -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; - -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import 
org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; - -import org.semanticweb.vlog4j.core.model.implementation.Expressions; - - -public class JavaCCParser extends JavaCCParserBase -{ -} - -PARSER_END(JavaCCParser) - - -void parse() throws PrefixDeclarationException: -{ -} -{ - ( base() )? - ( prefix() )* - ( source() )* - ( statement() )* - < EOF > -} - -void base() throws PrefixDeclarationException: -{ - String iriString; -} -{ - < BASE > iriString = IRIREF() < DOT > - { - prefixDeclarations.setBase(iriString); - } -} - -void prefix() throws PrefixDeclarationException: -{ - Token t; - String iriString; -} -{ - ( - LOOKAHEAD(< COLON >) < PREFIX > t = < COLON > iriString = IRIREF() < DOT > - | < PREFIX > t = < PNAME_NS > iriString = IRIREF() < DOT > - ) - { - //note that prefix includes the colon (:) - prefixDeclarations.setPrefix(t.image, iriString); - } -} - -void source() throws PrefixDeclarationException: -{ - String predicateName; - DataSource dataSource; - Token arity; -} -{ - < SOURCE > predicateName = predicateName() < LPAREN > arity = < INTEGER > < RPAREN > < COLON > dataSource = dataSource() < DOT > - { - int nArity; - nArity = Integer.parseInt(arity.image); - // Do not catch NumberFormatException: < INTEGER > matches must parse as int in Java! - if ( dataSource instanceof RdfFileDataSource && nArity != 3 ) - throw new ParseException("Cannot load RDF data into predicate of arity " + nArity +"."); - addDataSource(predicateName, nArity,dataSource); - } -} - -DataSource dataSource() throws PrefixDeclarationException: -{ - String fileName; - String endpoint; - String variables; - String query; - Token sourceName; - String[] arguments; -} -{ - < LOADCSV > < LPAREN > fileName = String() < RPAREN > - { - try { - return new CsvFileDataSource(new File(fileName)) ; - } catch (IOException e) { - throw new ParseException("Could not use source file \"" + fileName +"\": " + e.getMessage()); - } - } -| < LOADRDF > < LPAREN > fileName = String() < RPAREN > - { - try { - return new RdfFileDataSource(new File(fileName)) ; - } catch (IOException e) { - throw new ParseException("Could not use source file \"" + fileName +"\": " + e.getMessage()); - } - } -| < SPARQL > < LPAREN > endpoint = IRI(false) < COMMA > variables = String() < COMMA > query = String() < RPAREN > - { - try { - return new SparqlQueryResultDataSource(new URL(endpoint), variables, query); - } catch (MalformedURLException e) { - throw new ParseException("SPARQL endoint \"" + endpoint +"\" is not a valid URL: " + e.getMessage()); - } - } -| sourceName = < DIRECTIVENAME > < LPAREN > arguments = Strings() < RPAREN > - { - try { - return parserConfiguration.parseDataSourceDeclaration(sourceName.image, arguments); - } catch (ParsingException e) { - throw new ParseException(e.getMessage()); - } - } -} - - -void statement() throws PrefixDeclarationException: -{ - Statement statement; - resetVariableSets(); -} -{ - LOOKAHEAD(rule()) statement = rule() { knowledgeBase.addStatement(statement);} -| statement = fact(FormulaContext.HEAD) < DOT > //not from a rule - { - knowledgeBase.addStatement(statement); - } -} - -Rule rule() throws PrefixDeclarationException: -{ - List < PositiveLiteral > head; - List < Literal > body; -} -{ - head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > - { - // check that the intersection between headExiVars and BodyVars is empty - for (String variable : headExiVars) { - if (bodyVars.contains(variable)) - throw new 
ParseException("Malformed rule " + head + " :- " + body + "\nExistential variable " + variable + " also used in rule body."); - } - - // check that bodyVars contains headUniVars - for (String variable : headUniVars) { - if (!bodyVars.contains(variable)) - throw new ParseException("Unsafe rule " + head + " :- " + body + "\nUniversal variable " + variable + " occurs in head but not in body."); - } - - return Expressions.makeRule(Expressions.makePositiveConjunction(head), Expressions.makeConjunction(body)); - } -} - -List < PositiveLiteral > listOfPositiveLiterals(FormulaContext context) throws PrefixDeclarationException: -{ - PositiveLiteral l; - List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); -} -{ - l = positiveLiteral(context) { list.add(l); } - ( < COMMA > l = positiveLiteral(context) { list.add(l); } )* - { return list; } -} - -List < Literal > listOfLiterals(FormulaContext context) throws PrefixDeclarationException: -{ - Literal l; - List < Literal > list = new ArrayList < Literal > (); -} -{ - l = literal(context) { list.add(l); } - ( < COMMA > l = literal(context) { list.add(l); } )* - { return list; } -} - -Literal literal(FormulaContext context) throws PrefixDeclarationException: -{ - Literal l = null; -} -{ - l = positiveLiteral(context) { return l; } -| l = negativeLiteral(context) { return l; } -} - -PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclarationException: -{ - Token t; - List < Term > terms; - String predicateName; -} -{ - predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > - { return Expressions.makePositiveLiteral(predicateName, terms); } -} - -Fact fact(FormulaContext context) throws PrefixDeclarationException: -{ - Token t; - List < Term > terms; - String predicateName; -} -{ - predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > - { - try { - return Expressions.makeFact(predicateName, terms); - } catch (IllegalArgumentException e) { - throw new ParseException("Error parsing fact: " + e.toString()); - } - } -} - -NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException: -{ - List < Term > terms; - String predicateName; -} -{ - < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > - { return Expressions.makeNegativeLiteral(predicateName, terms); } -} - -List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationException: -{ - Term t; - List < Term > list = new ArrayList < Term > (); -} -{ - t = term(context) { list.add(t); } - ( < COMMA > t = term(context) { list.add(t); } )* - { return list; } -} - -String predicateName() throws PrefixDeclarationException: -{ - String s; - Token t; -} -{ - s = IRI(false) { return s; } -| t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } -} - -Term term(FormulaContext context) throws PrefixDeclarationException: -{ - Token t; - String s; - Constant c; -} -{ //TODO move Expressions.makeConstant to JavaCCParserBase - s = IRI(false) { return Expressions.makeAbstractConstant(s); } -| c = NumericLiteral() { return c; } -| c = RDFLiteral() { return c; } -| t = < UNIVAR > - { - s = t.image.substring(1); - if (context == FormulaContext.HEAD) - headUniVars.add(s); - else if (context == FormulaContext.BODY) - bodyVars.add(s); - return Expressions.makeUniversalVariable(s); - } -| t = < EXIVAR > - { - s = t.image.substring(1); - if (context == FormulaContext.HEAD) - headExiVars.add(s); - if (context == FormulaContext.BODY) - throw 
new ParseException("Existentialy quantified variables can not appear in the body. Line: " + t.beginLine + ", Column: "+ t.beginColumn); - return Expressions.makeExistentialVariable(s); - } -| t = < VARORPREDNAME > { return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(t.image));} -} - -/** [16] */ -Constant NumericLiteral() : -{ - Token t; -} -{ - t = < INTEGER > { return createIntegerConstant(t.image); } -| t = < DECIMAL > { return createDecimalConstant(t.image); } -| t = < DOUBLE > { return createDoubleConstant(t.image); } -} - -Constant RDFLiteral() throws PrefixDeclarationException: -{ - Token t; - String lex = null; - String lang = null; // Optional lang tag and datatype. - String dt = null; -} -{ - lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI(false) )? - { return createDataConstant(lex, lang, dt); } -} - -String Langtag() : -{ - Token t; -} -{ - // Enumerate the directives here because they look like language tags. - ( - t = < LANGTAG > - ) - { - String lang = stripChars(t.image, 1); - return lang; - } -} - -String String(): -{ - Token t; - String lex; -} -{ - ( - t = < STRING_LITERAL1 > { lex = stripQuotes(t.image); } - | t = < STRING_LITERAL2 > { lex = stripQuotes(t.image); } - | t = < STRING_LITERAL_LONG1 > { lex = stripQuotes3(t.image); } - | t = < STRING_LITERAL_LONG2 > { lex = stripQuotes3(t.image); } - ) - { - lex = unescapeStr(lex, t.beginLine, t.beginColumn); - return lex; - } -} - -String[] Strings(): -{ - String str; - String[] rest = {}; -} -{ - str = String() [< COMMA > rest = Strings()] - { - return collectStrings(str, rest); - } -} - -String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: -{ - String iri; -} -{ - ( - iri = IRIREF() - | iri = PrefixedName() - ) - { - String result = prefixDeclarations.absolutize(iri); - if (includeAngleBrackets) { - result = "<"+result+">"; - } - return result; - } -} - -String PrefixedName() throws PrefixDeclarationException: -{ - Token t; -} -{ - //( - t = < PNAME_LN > - //| t = < PNAME_NS > - //) - { return prefixDeclarations.resolvePrefixedName(t.image);} - //{ return prefixDeclarations.resolvePrefixedName(t.image, t.beginLine, t.beginColumn);} -} - -String IRIREF() : -{ - Token t; -} -{ - t = < IRI > - { - // we remove '<' and '>' - return t.image.substring(1,t.image.length()-1); - } -} - -// ------------------------------------------ -// Whitespace -SKIP : -{ - " " -| "\t" -| "\n" -| "\r" -| "\f" -} - -//Comments -SKIP :{< "%" (~["\n"])* "\n" >} - -// ------------------------------------------ -TOKEN : -{ - < PREFIX : "@prefix" > -| < BASE : "@base" > -| < SOURCE : "@source" > -| < LOADCSV : "load-csv"> -| < LOADRDF : "load-rdf"> -| < SPARQL : "sparql"> -} - -TOKEN: -{ - < INTEGER : ([ "-", "+" ])? < DIGITS > > -| < DECIMAL : - ([ "-", "+" ])? - ( - (< DIGITS >)+ "." (< DIGITS >)* - | "." (< DIGITS >)+ - ) - > -| < DOUBLE : - ([ "+", "-" ])? - ( - ([ "0"-"9" ])+ "." ([ "0"-"9" ])* < EXPONENT > - | "." ([ "0"-"9" ])+ (< EXPONENT >) - | ([ "0"-"9" ])+ < EXPONENT > - ) - > -| < #DIGITS : ([ "0"-"9" ])+ > -| < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? 
([ "0"-"9" ])+ > -} - -TOKEN: -{ - < STRING_LITERAL1 : - // Single quoted string - "'" - ( - (~[ "'", "\\", "\n", "\r" ]) - | < ECHAR > - )* - "'" > -| < STRING_LITERAL2 : - // Double quoted string - "\"" - ( - (~[ "\"", "\\", "\n", "\r" ]) - | < ECHAR > - )* - "\"" > -| < STRING_LITERAL_LONG1 : - "'''" - ( - ~[ "'", "\\" ] - | < ECHAR > - | ("'" ~[ "'" ]) - | ("''" ~[ "'" ]) - )* - "'''" > -| < STRING_LITERAL_LONG2 : - "\"\"\"" - ( - ~[ "\"", "\\" ] - | < ECHAR > - | ("\"" ~[ "\"" ]) - | ("\"\"" ~[ "\"" ]) - )* - "\"\"\"" > -| < #ECHAR : - "\\" - ( - "t" - | "b" - | "n" - | "r" - | "f" - | "\\" - | "\"" - | "'" - ) > -} - -TOKEN : -{ - // Includes # for relative URIs - < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > -| < PNAME_LN : (< PN_PREFIX >)? ":" < PN_LOCAL > > -| < PNAME_NS : < PN_PREFIX > ":" > -| < UNIVAR : < QMARK > < VARORPREDNAME > > -| < EXIVAR : < EMARK > < VARORPREDNAME > > -| < LANGTAG : - < AT > (< A2Z >)+ - ( - "-" (< A2ZN >)+ - )* > -| < VARORPREDNAME : < A2Z> (< A2ZN >)* > -| < #A2Z : [ "a"-"z", "A"-"Z" ] > -| < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > -| < DIRECTIVENAME : < A2Z > (< A2ZNX >)* > -| < #A2ZNX : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > -} - -TOKEN : -{ - < LPAREN : "(" > -| < RPAREN : ")" > -| < COMMA : "," > -| < DOT : "." > -| < ARROW : ":-" > -| < QMARK : "?" > -| < EMARK : "!" > -| < TILDE : "~" > -| < COLON : ":" > -| < DATATYPE : "^^" > -| < AT : "@" > -} - -TOKEN : -{ - < #PN_CHARS_BASE : - [ "A"-"Z" ] - | [ "a"-"z" ] - | [ "\u00c0"-"\u00d6" ] - | [ "\u00d8"-"\u00f6" ] - | [ "\u00f8"-"\u02ff" ] - | [ "\u0370"-"\u037d" ] - | [ "\u037f"-"\u1fff" ] - | [ "\u200c"-"\u200d" ] - | [ "\u2070"-"\u218f" ] - | [ "\u2c00"-"\u2fef" ] - | [ "\u3001"-"\ud7ff" ] - | [ "\uf900"-"\ufffd" ] - > - // | [ ""#x10000-#xEFFFF] -| - < #PN_CHARS_U : - < PN_CHARS_BASE > - | "_" > -| < #PN_CHARS : - ( - < PN_CHARS_U > - | "-" - | [ "0"-"9" ] - | "\u00b7" - | [ "\u0300"-"\u036f" ] - | [ "\u203f"-"\u2040" ] - ) > -| < #PN_PREFIX : - < PN_CHARS_BASE > - ( - ( - < PN_CHARS > - | "." - )* - < PN_CHARS > - )? > -| < #PN_LOCAL : - ( - < PN_CHARS_U > - | ":" - | [ "0"-"9" ] - ) - ( - ( - < PN_CHARS > - | "." - | ":" - )* - < PN_CHARS > - )? 
> -} +options +{ + // Use \ u escapes in streams AND use a reader for the query + // => get both raw and escaped unicode + JAVA_UNICODE_ESCAPE = true; + UNICODE_INPUT = false; + + STATIC = false; + // DEBUG_PARSER = true; + // DEBUG_TOKEN_MANAGER = true ; +} + +PARSER_BEGIN(JavaCCParser) +package org.semanticweb.vlog4j.parser.javacc; + +import java.io.File; +import java.io.InputStream; +import java.io.IOException; +import java.net.URL; +import java.net.MalformedURLException; + +import java.util.List; +import java.util.ArrayList; +import java.util.LinkedList; + +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; + +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; + +import org.semanticweb.vlog4j.core.model.implementation.Expressions; + + +public class JavaCCParser extends JavaCCParserBase +{ + private SubParserFactory getSubParserFactory() { + return new SubParserFactory(this); + } +} + +PARSER_END(JavaCCParser) + + +void parse() throws PrefixDeclarationException: +{ +} +{ + ( base() )? + ( prefix() )* + ( source() )* + ( statement() )* + < EOF > +} + +void base() throws PrefixDeclarationException: +{ + String iriString; +} +{ + < BASE > iriString = IRIREF() < DOT > + { + prefixDeclarations.setBase(iriString); + } +} + +void prefix() throws PrefixDeclarationException: +{ + Token t; + String iriString; +} +{ + ( + LOOKAHEAD(< COLON >) < PREFIX > t = < COLON > iriString = IRIREF() < DOT > + | < PREFIX > t = < PNAME_NS > iriString = IRIREF() < DOT > + ) + { + //note that prefix includes the colon (:) + prefixDeclarations.setPrefix(t.image, iriString); + } +} + +void source() throws PrefixDeclarationException: +{ + String predicateName; + DataSource dataSource; + Token arity; +} +{ + < SOURCE > predicateName = predicateName() < LPAREN > arity = < INTEGER > < RPAREN > < COLON > dataSource = dataSource() < DOT > + { + int nArity; + nArity = Integer.parseInt(arity.image); + // Do not catch NumberFormatException: < INTEGER > matches must parse as int in Java! 
+ if ( dataSource instanceof RdfFileDataSource && nArity != 3 ) + throw new ParseException("Cannot load RDF data into predicate of arity " + nArity +"."); + + addDataSource(predicateName, nArity, dataSource); + } +} + +DataSource dataSource() throws PrefixDeclarationException: +{ + Token sourceName; + List< String > arguments; +} +{ + (sourceName = < DIRECTIVENAME > | sourceName = < VARORPREDNAME >) < LPAREN > arguments = Arguments() < RPAREN > + { + try { + return parserConfiguration.parseDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); + } catch (ParsingException e) { + throw new ParseException(e.getMessage()); + } + } +} + +void statement() throws PrefixDeclarationException: +{ + Statement statement; + resetVariableSets(); +} +{ + LOOKAHEAD(rule()) statement = rule() { knowledgeBase.addStatement(statement);} +| statement = fact(FormulaContext.HEAD) < DOT > //not from a rule + { + knowledgeBase.addStatement(statement); + } +} + +Rule rule() throws PrefixDeclarationException: +{ + List < PositiveLiteral > head; + List < Literal > body; +} +{ + head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > + { + // check that the intersection between headExiVars and BodyVars is empty + for (String variable : headExiVars) { + if (bodyVars.contains(variable)) + throw new ParseException("Malformed rule " + head + " :- " + body + "\nExistential variable " + variable + " also used in rule body."); + } + + // check that bodyVars contains headUniVars + for (String variable : headUniVars) { + if (!bodyVars.contains(variable)) + throw new ParseException("Unsafe rule " + head + " :- " + body + "\nUniversal variable " + variable + " occurs in head but not in body."); + } + + return Expressions.makeRule(Expressions.makePositiveConjunction(head), Expressions.makeConjunction(body)); + } +} + +List < PositiveLiteral > listOfPositiveLiterals(FormulaContext context) throws PrefixDeclarationException: +{ + PositiveLiteral l; + List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); +} +{ + l = positiveLiteral(context) { list.add(l); } + ( < COMMA > l = positiveLiteral(context) { list.add(l); } )* + { return list; } +} + +List < Literal > listOfLiterals(FormulaContext context) throws PrefixDeclarationException: +{ + Literal l; + List < Literal > list = new ArrayList < Literal > (); +} +{ + l = literal(context) { list.add(l); } + ( < COMMA > l = literal(context) { list.add(l); } )* + { return list; } +} + +Literal literal(FormulaContext context) throws PrefixDeclarationException: +{ + Literal l = null; +} +{ + l = positiveLiteral(context) { return l; } +| l = negativeLiteral(context) { return l; } +} + +PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclarationException: +{ + Token t; + List < Term > terms; + String predicateName; +} +{ + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > + { return Expressions.makePositiveLiteral(predicateName, terms); } +} + +Fact fact(FormulaContext context) throws PrefixDeclarationException: +{ + Token t; + List < Term > terms; + String predicateName; +} +{ + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > + { + try { + return Expressions.makeFact(predicateName, terms); + } catch (IllegalArgumentException e) { + throw new ParseException("Error parsing fact: " + e.toString()); + } + } +} + +NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException: +{ + List < Term > 
terms; + String predicateName; +} +{ + < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > + { return Expressions.makeNegativeLiteral(predicateName, terms); } +} + +List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationException: +{ + Term t; + List < Term > list = new ArrayList < Term > (); +} +{ + t = term(context) { list.add(t); } + ( < COMMA > t = term(context) { list.add(t); } )* + { return list; } +} + +String predicateName() throws PrefixDeclarationException: +{ + String s; + Token t; +} +{ + s = IRI(false) { return s; } +| t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } +} + +Term term(FormulaContext context) throws PrefixDeclarationException: +{ + Token t; + String s; + Constant c; +} +{ //TODO move Expressions.makeConstant to JavaCCParserBase + s = IRI(false) { return Expressions.makeAbstractConstant(s); } +| c = NumericLiteral() { return c; } +| c = RDFLiteral() { return c; } +| t = < UNIVAR > + { + s = t.image.substring(1); + if (context == FormulaContext.HEAD) + headUniVars.add(s); + else if (context == FormulaContext.BODY) + bodyVars.add(s); + return Expressions.makeUniversalVariable(s); + } +| t = < EXIVAR > + { + s = t.image.substring(1); + if (context == FormulaContext.HEAD) + headExiVars.add(s); + if (context == FormulaContext.BODY) + throw new ParseException("Existentialy quantified variables can not appear in the body. Line: " + t.beginLine + ", Column: "+ t.beginColumn); + return Expressions.makeExistentialVariable(s); + } +| t = < VARORPREDNAME > { return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(t.image));} +} + +/** [16] */ +Constant NumericLiteral() : +{ + Token t; +} +{ + t = < INTEGER > { return createIntegerConstant(t.image); } +| t = < DECIMAL > { return createDecimalConstant(t.image); } +| t = < DOUBLE > { return createDoubleConstant(t.image); } +} + +Constant RDFLiteral() throws PrefixDeclarationException: +{ + Token t; + String lex = null; + String lang = null; // Optional lang tag and datatype. + String dt = null; +} +{ + lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI(false) )? + { return createDataConstant(lex, lang, dt); } +} + +String Langtag() : +{ + Token t; +} +{ + // Enumerate the directives here because they look like language tags. 
+ ( + t = < LANGTAG > + ) + { + String lang = stripChars(t.image, 1); + return lang; + } +} + +String String(): +{ + Token t; + String lex; +} +{ + ( + t = < STRING_LITERAL1 > { lex = stripQuotes(t.image); } + | t = < STRING_LITERAL2 > { lex = stripQuotes(t.image); } + | t = < STRING_LITERAL_LONG1 > { lex = stripQuotes3(t.image); } + | t = < STRING_LITERAL_LONG2 > { lex = stripQuotes3(t.image); } + ) + { + lex = unescapeStr(lex, t.beginLine, t.beginColumn); + return lex; + } +} + +LinkedList< String > Arguments() throws PrefixDeclarationException: +{ + String str; + LinkedList< String > rest = new LinkedList< String >(); +} +{ + (str = String() | str = IRI(true)) [< COMMA > rest = Arguments()] + { + rest.addFirst(str); + return rest; + } +} + +String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: +{ + String iri; +} +{ + ( + iri = IRIREF() + | iri = PrefixedName() + ) + { + String result = prefixDeclarations.absolutize(iri); + if (includeAngleBrackets) { + result = "<"+result+">"; + } + return result; + } +} + +String PrefixedName() throws PrefixDeclarationException: +{ + Token t; +} +{ + //( + t = < PNAME_LN > + //| t = < PNAME_NS > + //) + { return prefixDeclarations.resolvePrefixedName(t.image);} + //{ return prefixDeclarations.resolvePrefixedName(t.image, t.beginLine, t.beginColumn);} +} + +String IRIREF() : +{ + Token t; +} +{ + t = < IRI > + { + // we remove '<' and '>' + return t.image.substring(1,t.image.length()-1); + } +} + +// ------------------------------------------ +// Whitespace +SKIP : +{ + " " +| "\t" +| "\n" +| "\r" +| "\f" +} + +//Comments +SKIP :{< "%" (~["\n"])* "\n" >} + +// ------------------------------------------ +TOKEN : +{ + < PREFIX : "@prefix" > +| < BASE : "@base" > +| < SOURCE : "@source" > +} + +TOKEN: +{ + < INTEGER : ([ "-", "+" ])? < DIGITS > > +| < DECIMAL : + ([ "-", "+" ])? + ( + (< DIGITS >)+ "." (< DIGITS >)* + | "." (< DIGITS >)+ + ) + > +| < DOUBLE : + ([ "+", "-" ])? + ( + ([ "0"-"9" ])+ "." ([ "0"-"9" ])* < EXPONENT > + | "." ([ "0"-"9" ])+ (< EXPONENT >) + | ([ "0"-"9" ])+ < EXPONENT > + ) + > +| < #DIGITS : ([ "0"-"9" ])+ > +| < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? ([ "0"-"9" ])+ > +} + +TOKEN: +{ + < STRING_LITERAL1 : + // Single quoted string + "'" + ( + (~[ "'", "\\", "\n", "\r" ]) + | < ECHAR > + )* + "'" > +| < STRING_LITERAL2 : + // Double quoted string + "\"" + ( + (~[ "\"", "\\", "\n", "\r" ]) + | < ECHAR > + )* + "\"" > +| < STRING_LITERAL_LONG1 : + "'''" + ( + ~[ "'", "\\" ] + | < ECHAR > + | ("'" ~[ "'" ]) + | ("''" ~[ "'" ]) + )* + "'''" > +| < STRING_LITERAL_LONG2 : + "\"\"\"" + ( + ~[ "\"", "\\" ] + | < ECHAR > + | ("\"" ~[ "\"" ]) + | ("\"\"" ~[ "\"" ]) + )* + "\"\"\"" > +| < #ECHAR : + "\\" + ( + "t" + | "b" + | "n" + | "r" + | "f" + | "\\" + | "\"" + | "'" + ) > +} + +TOKEN : +{ + // Includes # for relative URIs + < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > +| < PNAME_LN : (< PN_PREFIX >)? ":" < PN_LOCAL > > +| < PNAME_NS : < PN_PREFIX > ":" > +| < UNIVAR : < QMARK > < VARORPREDNAME > > +| < EXIVAR : < EMARK > < VARORPREDNAME > > +| < LANGTAG : + < AT > (< A2Z >)+ + ( + "-" (< A2ZN >)+ + )* > +| < VARORPREDNAME : < A2Z> (< A2ZN >)* > +| < #A2Z : [ "a"-"z", "A"-"Z" ] > +| < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > +| < DIRECTIVENAME : < A2Z > (< A2ZNX >)* > +| < #A2ZNX : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > +} + +TOKEN : +{ + < LPAREN : "(" > +| < RPAREN : ")" > +| < COMMA : "," > +| < DOT : "." > +| < ARROW : ":-" > +| < QMARK : "?" 
> +| < EMARK : "!" > +| < TILDE : "~" > +| < COLON : ":" > +| < DATATYPE : "^^" > +| < AT : "@" > +} + +TOKEN : +{ + < #PN_CHARS_BASE : + [ "A"-"Z" ] + | [ "a"-"z" ] + | [ "\u00c0"-"\u00d6" ] + | [ "\u00d8"-"\u00f6" ] + | [ "\u00f8"-"\u02ff" ] + | [ "\u0370"-"\u037d" ] + | [ "\u037f"-"\u1fff" ] + | [ "\u200c"-"\u200d" ] + | [ "\u2070"-"\u218f" ] + | [ "\u2c00"-"\u2fef" ] + | [ "\u3001"-"\ud7ff" ] + | [ "\uf900"-"\ufffd" ] + > + // | [ ""#x10000-#xEFFFF] +| + < #PN_CHARS_U : + < PN_CHARS_BASE > + | "_" > +| < #PN_CHARS : + ( + < PN_CHARS_U > + | "-" + | [ "0"-"9" ] + | "\u00b7" + | [ "\u0300"-"\u036f" ] + | [ "\u203f"-"\u2040" ] + ) > +| < #PN_PREFIX : + < PN_CHARS_BASE > + ( + ( + < PN_CHARS > + | "." + )* + < PN_CHARS > + )? > +| < #PN_LOCAL : + ( + < PN_CHARS_U > + | ":" + | [ "0"-"9" ] + ) + ( + ( + < PN_CHARS > + | "." + | ":" + )* + < PN_CHARS > + )? > +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 7f0929e5f..8902a4c08 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -52,7 +52,7 @@ * */ public class JavaCCParserBase { - final PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); + PrefixDeclarations prefixDeclarations; KnowledgeBase knowledgeBase; ParserConfiguration parserConfiguration; @@ -91,6 +91,7 @@ public enum FormulaContext { public JavaCCParserBase() { this.knowledgeBase = new KnowledgeBase(); + this.prefixDeclarations = new LocalPrefixDeclarations(); this.parserConfiguration = new ParserConfiguration(); } @@ -106,22 +107,19 @@ Constant createDoubleConstant(String lexicalForm) { return Expressions.makeDatatypeConstant(lexicalForm, PrefixDeclarations.XSD_DOUBLE); } - void addDataSource(String predicateName, int arity, DataSource dataSource) { + void addDataSource(String predicateName, int arity, DataSource dataSource) throws ParseException { + if (dataSource.getRequiredArity().isPresent()) { + Integer requiredArity = dataSource.getRequiredArity().get(); + if (requiredArity != arity) { + throw new ParseException("Invalid arity " + arity + " for data source, " + + "expected " + requiredArity + "."); + } + } + Predicate predicate = Expressions.makePredicate(predicateName, arity); knowledgeBase.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } - static String[] collectStrings(String str, String[] rest) { - ArrayList strings = new ArrayList<>(); - strings.add(str); - - for (String next : rest) { - strings.add(next); - } - - return strings.toArray(rest); - } - static String unescapeStr(String s, int line, int column) throws ParseException { return unescape(s, '\\', false, line, column); } @@ -213,7 +211,7 @@ static String stripChars(String s, int n) { /** * Creates a suitable {@link Constant} from the parsed data. 
- * + * * @param string the string data (unescaped) * @param languageTag the language tag, or null if not present * @param datatype the datatype, or null if not provided @@ -255,5 +253,11 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } + protected void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { + this.prefixDeclarations = prefixDeclarations; + } + protected PrefixDeclarations getPrefixDeclarations() { + return prefixDeclarations; + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java new file mode 100644 index 000000000..ddea292b8 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -0,0 +1,78 @@ +package org.semanticweb.vlog4j.parser.javacc; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.InputStream; +import java.io.ByteArrayInputStream; + +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.ParserConfiguration; + +/** + * Factory for creating a SubParser sharing configuration, state, and + * prefixes, but with an independent input stream, to be used, e.g., + * for parsing arguments in data source declarations. + * + * @author Maximilian Marx + */ +public class SubParserFactory { + /** + * Construct a SubParserFactory. + * + * @argument parser the parser instance to get the state from. + */ + SubParserFactory(JavaCCParser parser) { + this.knowledgeBase = parser.getKnowledgeBase(); + this.prefixDeclarations = parser.getPrefixDeclarations(); + this.parserConfiguration = parser.getParserConfiguration(); + } + + /** + * Create a new parser with the specified state and given input. + * + * @argument inputStream the input stream to parse. + * @argument encoding encoding of the input stream. + * + * @return A new {@link JavaCCParser} bound to inputStream and + * with the specified parser state. 
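A handler can use this factory to re-parse one of its raw string arguments with the prefix declarations and configuration of the parser that encountered the declaration, which is how the SPARQL handler above resolves its endpoint IRI. A sketch of that pattern as a reusable helper; the class name is invented:

```
import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException;
import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler;
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.javacc.JavaCCParser;
import org.semanticweb.vlog4j.parser.javacc.ParseException;
import org.semanticweb.vlog4j.parser.javacc.SubParserFactory;

public abstract class IriArgumentHandler implements DataSourceDeclarationHandler {
    /**
     * Resolves a possibly prefixed IRI argument against the prefix
     * declarations of the enclosing parser.
     */
    protected String resolveIri(final String argument, final SubParserFactory subParserFactory)
            throws ParsingException {
        try {
            final JavaCCParser subParser = subParserFactory.makeSubParser(argument);
            return subParser.IRI(false);
        } catch (ParseException | PrefixDeclarationException e) {
            throw new ParsingException(e);
        }
    }
}
```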
+ */ + public JavaCCParser makeSubParser(final InputStream inputStream, final String encoding) { + JavaCCParser subParser = new JavaCCParser(inputStream, encoding); + subParser.setKnowledgeBase(knowledgeBase); + subParser.setPrefixDeclarations(prefixDeclarations); + subParser.setParserConfiguration(parserConfiguration); + + return subParser; + } + + public JavaCCParser makeSubParser(final InputStream inputStream) { + return makeSubParser(inputStream, "UTF-8"); + } + + public JavaCCParser makeSubParser(final String string) { + return makeSubParser(new ByteArrayInputStream(string.getBytes())); + } + + private KnowledgeBase knowledgeBase; + private ParserConfiguration parserConfiguration; + private PrefixDeclarations prefixDeclarations; +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 8f2a3116a..aee340a60 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -29,8 +29,7 @@ import java.net.URL; import java.util.ArrayList; import java.util.Arrays; - -import javax.sql.DataSource; +import java.util.List; import org.junit.Test; import org.mockito.ArgumentMatchers; @@ -46,6 +45,7 @@ import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; public class RuleParserDataSourceTest { @Test @@ -91,24 +91,25 @@ public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURL RuleParser.parse(input); } - @Test(expected = ParsingException.class) - public void testUnknownDataSource() throws ParsingException { - String input = "@source p(2) : unknown-data-source(\"hello, world\") ."; - RuleParser.parse(input); - } + @Test(expected = ParsingException.class) + public void testUnknownDataSource() throws ParsingException { + String input = "@source p(2) : unknown-data-source(\"hello, world\") ."; + RuleParser.parse(input); + } - @Test - public void testCustomDataSource() throws ParsingException { - CsvFileDataSource source = mock(CsvFileDataSource.class); - DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); - ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.any()); + @Test + public void testCustomDataSource() throws ParsingException { + CsvFileDataSource source = mock(CsvFileDataSource.class); + DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDataSource("mock-source", handler); + doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.>any(), + ArgumentMatchers.any()); - String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; - String[] expectedArguments = {"hello", "world"}; - RuleParser.parse(input, parserConfiguration); + String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; + List expectedArguments = Arrays.asList("hello", "world"); + RuleParser.parse(input, parserConfiguration); - verify(handler).handleDeclaration(eq(expectedArguments)); - } + 
verify(handler).handleDeclaration(eq(expectedArguments), ArgumentMatchers.any()); + } } From 373df673d6315973339644414b7ad8100342ac21 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 12 Nov 2019 13:38:43 +0100 Subject: [PATCH 0428/1003] Parser: Split out Default Parser Configuration --- .../parser/DefaultParserConfiguration.java | 46 +++++++++++++++++++ .../vlog4j/parser/ParserConfiguration.java | 17 ------- .../parser/javacc/JavaCCParserBase.java | 23 +++++----- 3 files changed, 57 insertions(+), 29 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java new file mode 100644 index 000000000..168a738bb --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java @@ -0,0 +1,46 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; + +/** + * Default parser configuration. Registers default data sources. + * + * @author Maximilian Marx + */ +public class DefaultParserConfiguration extends ParserConfiguration { + public DefaultParserConfiguration() { + super(); + registerDefaultDataSources(); + } + + /** + * Register built-in data sources (currently CSV, RDF, SPARQL). 
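+	 *
+	 * Additional sources can still be registered on top of these defaults via
+	 * {@link ParserConfiguration#registerDataSource}. A minimal, illustrative
+	 * sketch (the handler class name below is hypothetical):
+	 *
+	 * <pre>
+	 * ParserConfiguration parserConfiguration = new DefaultParserConfiguration();
+	 * parserConfiguration.registerDataSource("my-source", new MyDataSourceDeclarationHandler());
+	 * </pre>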
+ */ + private void registerDefaultDataSources() { + registerDataSource("load-csv", new CsvFileDataSourceDeclarationHandler()); + registerDataSource("load-rdf", new RdfFileDataSourceDeclarationHandler()); + registerDataSource("sparql", new SparqlQueryResultDataSourceDeclarationHandler()); + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index e810aa930..cf04c68f4 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -24,9 +24,6 @@ import java.util.List; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; -import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; -import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -35,10 +32,6 @@ * @author Maximilian Marx */ public class ParserConfiguration { - public ParserConfiguration() { - registerDefaultDataSources(); - } - /** * Register a new Data Source. * @@ -83,16 +76,6 @@ public DataSource parseDataSourceDeclaration(String name, List args, return handler.handleDeclaration(args, subParserFactory); } - /** - * Register built-in data sources (currently CSV, RDF, SPARQL). - */ - private void registerDefaultDataSources() { - registerDataSource("load-csv", new CsvFileDataSourceDeclarationHandler()); - registerDataSource("load-rdf", new RdfFileDataSourceDeclarationHandler()); - registerDataSource("sparql", new SparqlQueryResultDataSourceDeclarationHandler()); - } - - /** * The registered data sources. */ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 8902a4c08..dcd01197c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.parser.javacc; -import java.util.ArrayList; - /*- * #%L * vlog4j-parser @@ -33,6 +31,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -55,7 +54,7 @@ public class JavaCCParserBase { PrefixDeclarations prefixDeclarations; KnowledgeBase knowledgeBase; - ParserConfiguration parserConfiguration; + ParserConfiguration parserConfiguration; /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -92,7 +91,7 @@ public enum FormulaContext { public JavaCCParserBase() { this.knowledgeBase = new KnowledgeBase(); this.prefixDeclarations = new LocalPrefixDeclarations(); - this.parserConfiguration = new ParserConfiguration(); + this.parserConfiguration = new DefaultParserConfiguration(); } Constant createIntegerConstant(String lexicalForm) { @@ -111,8 +110,8 @@ void addDataSource(String predicateName, int arity, DataSource dataSource) throw if (dataSource.getRequiredArity().isPresent()) { Integer requiredArity = dataSource.getRequiredArity().get(); if (requiredArity != arity) { - throw new ParseException("Invalid arity " + arity + " for data source, " - + "expected " + requiredArity + "."); + throw new ParseException( + "Invalid arity " + arity + " for data source, " + "expected " + requiredArity + "."); } } @@ -245,13 +244,13 @@ public KnowledgeBase getKnowledgeBase() { return knowledgeBase; } - public void setParserConfiguration(ParserConfiguration parserConfiguration) { - this.parserConfiguration = parserConfiguration; - } + public void setParserConfiguration(ParserConfiguration parserConfiguration) { + this.parserConfiguration = parserConfiguration; + } - public ParserConfiguration getParserConfiguration() { - return parserConfiguration; - } + public ParserConfiguration getParserConfiguration() { + return parserConfiguration; + } protected void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { this.prefixDeclarations = prefixDeclarations; From a81208ae749eb4e679124f4187bd0ff3d11e7a09 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 12 Nov 2019 20:33:31 +0100 Subject: [PATCH 0429/1003] Parser: Allow for custom DatatypeConstants --- .../parser/DatatypeConstantHandler.java | 42 ++++++ .../vlog4j/parser/ParserConfiguration.java | 64 +++++++++ .../semanticweb/vlog4j/parser/RuleParser.java | 134 +++++++++++------- .../vlog4j/parser/javacc/JavaCCParser.jj | 16 +-- .../parser/javacc/JavaCCParserBase.java | 50 +++---- .../vlog4j/syntax/parser/RuleParserTest.java | 23 ++- 6 files changed, 247 insertions(+), 82 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java new file mode 100644 index 000000000..7f8ffc3e7 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java @@ -0,0 +1,42 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; + +/** + * Handler for parsing a custom Data Source declaration. + * + * @author Maximilian Marx + */ +@FunctionalInterface +public interface DatatypeConstantHandler { + /** + * Parse a Data Source Declaration. 
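+	 * Given the lexical form of a literal, this creates a {@link DatatypeConstant}
+	 * of the datatype this handler was registered for.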
+ * + * @param lexicalForm lexical representation of the constant. + * + * @throws ParsingException when the given representation is invalid for this datatype. + + * @return + */ + public DatatypeConstant createConstant(String lexicalForm) throws ParsingException; +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index cf04c68f4..1aec212b0 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -23,7 +23,10 @@ import java.util.HashMap; import java.util.List; +import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -76,8 +79,69 @@ public DataSource parseDataSourceDeclaration(String name, List args, return handler.handleDeclaration(args, subParserFactory); } + /** + * Parse a constant with optional data type and language tag. + * + * @param lexicalForm the (unescaped) lexical form of the constant. + * @param languageTag the language tag, or null if not present. + * @param the datatype, or null if not present. + * @note At most one of {@code languageTag} and {@code datatype} may be + * non-null. + * + * @throws ParsingException when the lexical form is invalid for the + * given data type. + * @throws IllegalArgumentException when neither {@code languageTag} and + * {@code datatype} are null. + * @return the {@link Constant} corresponding to the given arguments. + */ + public Constant parseConstant(String lexicalForm, String languageTag, String datatype) + throws ParsingException, IllegalArgumentException { + if (languageTag != null && datatype != null) { + throw new IllegalArgumentException( + "A constant with a language tag may not explicitly specify a data type."); + } else if (languageTag != null) { + return Expressions.makeLanguageStringConstant(lexicalForm, languageTag); + } else { + String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); + DatatypeConstantHandler handler = datatypes.get(type); + + if (handler != null) { + return handler.createConstant(lexicalForm); + } + + return Expressions.makeDatatypeConstant(lexicalForm, type); + } + } + + /** + * Register a new data type. + * + * @param name the IRI representing the data type. + * @param handler a {@link DatatypeConstantHandler} that parses a syntactic form + * into a {@link Constant}. + * + * @throws IllegalArgumentException when the data type name has already been + * registered. + * + * @return this + */ + public ParserConfiguration registerDatatype(String name, DatatypeConstantHandler handler) + throws IllegalArgumentException { + if (datatypes.containsKey(name)) { + throw new IllegalArgumentException("Data type \"" + name + "\" is already registered."); + } + + this.datatypes.put(name, handler); + return this; + } + /** * The registered data sources. */ private HashMap dataSources = new HashMap<>(); + + /** + * The registered datatypes. 
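+	 *
+	 * Entries are added via {@link #registerDatatype}. A minimal, illustrative
+	 * sketch (the datatype IRI used here is an example only):
+	 *
+	 * <pre>
+	 * ParserConfiguration parserConfiguration = new ParserConfiguration();
+	 * parserConfiguration.registerDatatype("http://example.org/#temperature",
+	 * 		lexicalForm -> Expressions.makeDatatypeConstant(lexicalForm, "http://example.org/#temperature"));
+	 * </pre>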
+ */ + private HashMap datatypes = new HashMap<>(); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 129537296..c3ba4160d 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.SyntaxObject; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.FormulaContext; @@ -46,18 +47,21 @@ public class RuleParser { private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); - public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, final ParserConfiguration parserConfiguration) throws ParsingException { - final JavaCCParser parser = new JavaCCParser(stream, encoding); - parser.setKnowledgeBase(knowledgeBase); - parser.setParserConfiguration(parserConfiguration); - doParse(parser); - } + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, + final ParserConfiguration parserConfiguration) throws ParsingException { + final JavaCCParser parser = new JavaCCParser(stream, encoding); + parser.setKnowledgeBase(knowledgeBase); + parser.setParserConfiguration(parserConfiguration); + doParse(parser); + } - public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, + final ParserConfiguration parserConfiguration) throws ParsingException { parseInto(knowledgeBase, stream, "UTF-8", parserConfiguration); } - public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration) throws ParsingException { + public static void parseInto(final KnowledgeBase knowledgeBase, final String input, + final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); parseInto(knowledgeBase, inputStream, "UTF-8", parserConfiguration); } @@ -78,20 +82,23 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final String inp parseInto(knowledgeBase, inputStream, "UTF-8"); } - public static KnowledgeBase parse(final InputStream stream, final String encoding, final ParserConfiguration parserConfiguration) throws ParsingException { - JavaCCParser parser = new JavaCCParser(stream, encoding); - parser.setParserConfiguration(parserConfiguration); - return doParse(parser); - } + public static KnowledgeBase parse(final InputStream stream, final String encoding, + final ParserConfiguration parserConfiguration) throws ParsingException { + JavaCCParser parser = new JavaCCParser(stream, encoding); + parser.setParserConfiguration(parserConfiguration); + return doParse(parser); + } - public static KnowledgeBase parse(final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { - return parse(stream, "UTF-8", parserConfiguration); - } + public static 
KnowledgeBase parse(final InputStream stream, final ParserConfiguration parserConfiguration) + throws ParsingException { + return parse(stream, "UTF-8", parserConfiguration); + } - public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - return parse(inputStream, "UTF-8", parserConfiguration); - } + public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration) + throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + return parse(inputStream, "UTF-8", parserConfiguration); + } public static KnowledgeBase parse(final InputStream stream, final String encoding) throws ParsingException { return doParse(new JavaCCParser(stream, encoding)); @@ -106,48 +113,79 @@ public static KnowledgeBase parse(final String input) throws ParsingException { return parse(inputStream, "UTF-8"); } - public static Rule parseRule(final String input) throws ParsingException { + /** + * Interface for a method parsing a {@link SyntaxObject}. + * + * This is needed to specify the exceptions thrown by the parse method. + */ + @FunctionalInterface + interface SyntaxObjectParser { + T parse(final JavaCCParser parser) throws ParseException, PrefixDeclarationException, TokenMgrError; + } + + /** + * Parse a {@link SyntaxObject}. + * + * @param input Input string. + * @param parserAction Parsing method for the {@code T}. + * @param syntaxObjectType Description of the type {@code T} being parsed. + * @param parserConfiguration {@link ParserConfiguration} instance, or null. + * + * @throws ParsingException when an error during parsing occurs. + * @return an appropriate instance of {@code T} + */ + static T parseSyntaxObject(final String input, SyntaxObjectParser parserAction, + final String syntaxObjectType, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); + + if (parserConfiguration != null) { + localParser.setParserConfiguration(parserConfiguration); + } + try { - return localParser.rule(); + return parserAction.parse(localParser); } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing rule: {}!", input); - throw new ParsingException("Exception while parsing rule", e); + LOGGER.error("Exception while parsing " + syntaxObjectType + ": {}!", input); + throw new ParsingException("Exception while parsing " + syntaxObjectType, e); } } + public static Rule parseRule(final String input, final ParserConfiguration parserConfiguration) + throws ParsingException { + return parseSyntaxObject(input, JavaCCParser::rule, "rule", parserConfiguration); + } + + public static Rule parseRule(final String input) throws ParsingException { + return parseRule(input, null); + } + + public static Literal parseLiteral(final String input, final ParserConfiguration parserConfiguration) + throws ParsingException { + return parseSyntaxObject(input, parser -> parser.literal(FormulaContext.HEAD), "literal", parserConfiguration); + } + public static Literal parseLiteral(final String input) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); - try { - return 
localParser.literal(FormulaContext.HEAD); - } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing literal: {}!", input); - throw new ParsingException("Exception while parsing literal", e); - } + return parseLiteral(input, null); + } + + public static PositiveLiteral parsePositiveLiteral(final String input, + final ParserConfiguration parserConfiguration) throws ParsingException { + return parseSyntaxObject(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positivel literal", + parserConfiguration); } public static PositiveLiteral parsePositiveLiteral(final String input) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); - try { - return localParser.positiveLiteral(FormulaContext.HEAD); - } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing positive literal: {}!", input); - throw new ParsingException("Exception while parsing positive literal", e); - } + return parsePositiveLiteral(input, null); + } + + public static Fact parseFact(final String input, final ParserConfiguration parserConfiguration) + throws ParsingException { + return parseSyntaxObject(input, parser -> parser.fact(FormulaContext.HEAD), "fact", parserConfiguration); } public static Fact parseFact(final String input) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); - try { - return localParser.fact(FormulaContext.HEAD); - } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing fact: {}!", input); - throw new ParsingException("Exception while parsing fact: {}!", e); - } + return parseFact(input, null); } static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 022a1815a..46e15b71b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -36,6 +36,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -259,8 +260,8 @@ Term term(FormulaContext context) throws PrefixDeclarationException: String s; Constant c; } -{ //TODO move Expressions.makeConstant to JavaCCParserBase - s = IRI(false) { return Expressions.makeAbstractConstant(s); } +{ + s = IRI(false) { return createConstant(s); } | c = NumericLiteral() { return c; } | c = RDFLiteral() { return c; } | t = < UNIVAR > @@ -281,7 +282,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException: throw new ParseException("Existentialy quantified variables can not appear in the body. 
Line: " + t.beginLine + ", Column: "+ t.beginColumn); return Expressions.makeExistentialVariable(s); } -| t = < VARORPREDNAME > { return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(t.image));} +| t = < VARORPREDNAME > { return createConstant(t.image); } } /** [16] */ @@ -290,21 +291,20 @@ Constant NumericLiteral() : Token t; } { - t = < INTEGER > { return createIntegerConstant(t.image); } -| t = < DECIMAL > { return createDecimalConstant(t.image); } -| t = < DOUBLE > { return createDoubleConstant(t.image); } + t = < INTEGER > { return createConstant(t.image, PrefixDeclarations.XSD_INTEGER); } +| t = < DECIMAL > { return createConstant(t.image, PrefixDeclarations.XSD_DECIMAL); } +| t = < DOUBLE > { return createConstant(t.image, PrefixDeclarations.XSD_DOUBLE); } } Constant RDFLiteral() throws PrefixDeclarationException: { - Token t; String lex = null; String lang = null; // Optional lang tag and datatype. String dt = null; } { lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI(false) )? - { return createDataConstant(lex, lang, dt); } + { return createConstant(lex, lang, dt); } } String Langtag() : diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index dcd01197c..7abc4ad4f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -23,6 +23,7 @@ import java.util.HashSet; import java.util.List; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; @@ -34,6 +35,7 @@ import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.core.model.api.Predicate; /** @@ -94,16 +96,33 @@ public JavaCCParserBase() { this.parserConfiguration = new DefaultParserConfiguration(); } - Constant createIntegerConstant(String lexicalForm) { - return Expressions.makeDatatypeConstant(lexicalForm, PrefixDeclarations.XSD_INTEGER); + Constant createConstant(String lexicalForm) throws ParseException { + try { + return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(lexicalForm)); + } catch (PrefixDeclarationException e) { + throw new ParseException(e.getMessage()); + } } - Constant createDecimalConstant(String lexicalForm) { - return Expressions.makeDatatypeConstant(lexicalForm, PrefixDeclarations.XSD_DECIMAL); + Constant createConstant(String lexicalForm, String datatype) throws ParseException { + return createConstant(lexicalForm, null, datatype); } - Constant createDoubleConstant(String lexicalForm) { - return Expressions.makeDatatypeConstant(lexicalForm, PrefixDeclarations.XSD_DOUBLE); + + /** + * Creates a suitable {@link Constant} from the parsed data. 
+ * + * @param string the string data (unescaped) + * @param languageTag the language tag, or null if not present + * @param datatype the datatype, or null if not provided + * @return suitable constant + */ + Constant createConstant(String lexicalForm, String languageTag, String datatype) throws ParseException { + try { + return parserConfiguration.parseConstant(lexicalForm, languageTag, datatype); + } catch (ParsingException e) { + throw new ParseException(e.getMessage()); + } } void addDataSource(String predicateName, int arity, DataSource dataSource) throws ParseException { @@ -208,25 +227,6 @@ static String stripChars(String s, int n) { return s.substring(n, s.length()); } - /** - * Creates a suitable {@link Constant} from the parsed data. - * - * @param string the string data (unescaped) - * @param languageTag the language tag, or null if not present - * @param datatype the datatype, or null if not provided - * @return suitable constant - */ - Constant createDataConstant(String string, String languageTag, String datatype) { - // https://www.w3.org/TR/turtle/#grammar-production-String RDFLiteral - if (datatype != null) { - return new DatatypeConstantImpl(string, datatype); - } else if (languageTag != null) { - return new LanguageStringConstantImpl(string, languageTag); - } else { - return new DatatypeConstantImpl(string, "http://www.w3.org/2001/XMLSchema#string"); - } - } - /** * Reset the local set variables used when parsing a rule. */ diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 6abc8647a..13a8d9cd5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -19,15 +19,19 @@ * limitations under the License. 
* #L% */ -import static org.junit.Assert.assertEquals; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; import java.util.ArrayList; import java.util.Arrays; import org.junit.Test; +import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; @@ -36,6 +40,8 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -374,4 +380,19 @@ public void predicateAbsoluteIRITest() throws ParsingException { Fact f2 = Expressions.makeFact("a:b", a); assertEquals(f, f2); } + + @Test + public void testCustomDatatype() throws ParsingException { + final String typename = "http://example.org/#test"; + DatatypeConstant constant = Expressions.makeDatatypeConstant("test", typename); + DatatypeConstantHandler handler = mock(DatatypeConstantHandler.class); + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDatatype(typename, handler); + doReturn(constant).when(handler).createConstant(ArgumentMatchers.eq("hello, world")); + + String input = "p(\"hello, world\"^^<" + typename + ">) ."; + Literal literal = RuleParser.parseLiteral(input, parserConfiguration); + DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; + assertEquals(constant, result); + } } From bcffd16f040bd47b0d7de603340a549d5b9037ff Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 19 Nov 2019 13:01:04 +0100 Subject: [PATCH 0430/1003] Parser: Force consumption of all input when parsing SyntaxObjects --- .../semanticweb/vlog4j/parser/RuleParser.java | 4 +++- .../vlog4j/parser/javacc/JavaCCParser.jj | 4 ++++ .../vlog4j/syntax/parser/RuleParserTest.java | 23 ++++++++++++++++++- 3 files changed, 29 insertions(+), 2 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index c3ba4160d..0dfa67778 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -144,7 +144,9 @@ static T parseSyntaxObject(final String input, SyntaxOb } try { - return parserAction.parse(localParser); + T result = parserAction.parse(localParser); + localParser.ensureEndOfInput(); + return result; } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { LOGGER.error("Exception while parsing " + syntaxObjectType + ": {}!", input); throw new ParsingException("Exception while parsing " + syntaxObjectType, e); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 46e15b71b..8593d2421 100644 --- 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -49,6 +49,10 @@ public class JavaCCParser extends JavaCCParserBase private SubParserFactory getSubParserFactory() { return new SubParserFactory(this); } + + public void ensureEndOfInput() throws ParseException { + jj_consume_token(EOF); + } } PARSER_END(JavaCCParser) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 13a8d9cd5..11db42fd6 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -365,6 +365,27 @@ public void testBlankPredicateName() throws ParsingException { RuleParser.parse(input); } + @Test(expected = ParsingException.class) + public void testInvalidDatatypeOnLiteral() throws ParsingException { + final String input = "P(\"a\")^^whatever"; + RuleParser.parseLiteral(input); + } + + @Test(expected = ParsingException.class) + public void testNonIriTypeInDatatypeLiteral() throws ParsingException { + final String input = "P(\"a\"^^whatever)"; + RuleParser.parseLiteral(input); + } + + @Test + public void testIriTypeInDatatypeLiteral() throws ParsingException { + final String iri = "whatever"; + final String input = "P(\"a\"^^<" + iri + ">)"; + Literal literal = RuleParser.parseLiteral(input); + DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; + assertEquals(iri, result.getDatatype()); + } + @Test public void predicateRelativeNumericIRITest() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("a"); @@ -390,7 +411,7 @@ public void testCustomDatatype() throws ParsingException { parserConfiguration.registerDatatype(typename, handler); doReturn(constant).when(handler).createConstant(ArgumentMatchers.eq("hello, world")); - String input = "p(\"hello, world\"^^<" + typename + ">) ."; + String input = "p(\"hello, world\"^^<" + typename + ">)"; Literal literal = RuleParser.parseLiteral(input, parserConfiguration); DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; assertEquals(constant, result); From 9b11a9ff37ce71f81f4521dbdb366f924b5b9f15 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 16:32:15 +0100 Subject: [PATCH 0431/1003] Parser: add helpers to parse DataSourceDeclarations and Terms --- .../semanticweb/vlog4j/parser/RuleParser.java | 76 +++++++++++++++---- .../parser/RuleParserDataSourceTest.java | 29 ++----- .../vlog4j/syntax/parser/RuleParserTest.java | 67 +++++++--------- 3 files changed, 97 insertions(+), 75 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 0dfa67778..5d942435a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -22,13 +22,16 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; +import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; 
import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.SyntaxObject; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.FormulaContext; @@ -114,28 +117,29 @@ public static KnowledgeBase parse(final String input) throws ParsingException { } /** - * Interface for a method parsing a {@link SyntaxObject}. + * Interface for a method parsing a fragment of the supported syntax. * * This is needed to specify the exceptions thrown by the parse method. */ @FunctionalInterface - interface SyntaxObjectParser { - T parse(final JavaCCParser parser) throws ParseException, PrefixDeclarationException, TokenMgrError; + interface SyntaxFragmentParser { + T parse(final JavaCCParser parser) + throws ParsingException, ParseException, PrefixDeclarationException, TokenMgrError; } /** - * Parse a {@link SyntaxObject}. + * Parse a syntax fragment. * * @param input Input string. * @param parserAction Parsing method for the {@code T}. - * @param syntaxObjectType Description of the type {@code T} being parsed. + * @param syntaxFragmentType Description of the type {@code T} being parsed. * @param parserConfiguration {@link ParserConfiguration} instance, or null. * * @throws ParsingException when an error during parsing occurs. * @return an appropriate instance of {@code T} */ - static T parseSyntaxObject(final String input, SyntaxObjectParser parserAction, - final String syntaxObjectType, final ParserConfiguration parserConfiguration) throws ParsingException { + static T parseSyntaxFragment(final String input, SyntaxFragmentParser parserAction, + final String syntaxFragmentType, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); @@ -148,14 +152,14 @@ static T parseSyntaxObject(final String input, SyntaxOb localParser.ensureEndOfInput(); return result; } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing " + syntaxObjectType + ": {}!", input); - throw new ParsingException("Exception while parsing " + syntaxObjectType, e); + LOGGER.error("Exception while parsing " + syntaxFragmentType + ": {}!", input); + throw new ParsingException("Exception while parsing " + syntaxFragmentType, e); } } public static Rule parseRule(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - return parseSyntaxObject(input, JavaCCParser::rule, "rule", parserConfiguration); + return parseSyntaxFragment(input, JavaCCParser::rule, "rule", parserConfiguration); } public static Rule parseRule(final String input) throws ParsingException { @@ -164,7 +168,8 @@ public static Rule parseRule(final String input) throws ParsingException { public static Literal parseLiteral(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - return parseSyntaxObject(input, parser -> parser.literal(FormulaContext.HEAD), "literal", parserConfiguration); + return parseSyntaxFragment(input, parser -> parser.literal(FormulaContext.HEAD), "literal", + parserConfiguration); } public static Literal parseLiteral(final String input) throws 
ParsingException { @@ -173,7 +178,7 @@ public static Literal parseLiteral(final String input) throws ParsingException { public static PositiveLiteral parsePositiveLiteral(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - return parseSyntaxObject(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positivel literal", + return parseSyntaxFragment(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positivel literal", parserConfiguration); } @@ -183,13 +188,41 @@ public static PositiveLiteral parsePositiveLiteral(final String input) throws Pa public static Fact parseFact(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - return parseSyntaxObject(input, parser -> parser.fact(FormulaContext.HEAD), "fact", parserConfiguration); + return parseSyntaxFragment(input, parser -> parser.fact(FormulaContext.HEAD), "fact", parserConfiguration); } public static Fact parseFact(final String input) throws ParsingException { return parseFact(input, null); } + public static Term parseTerm(final String input, final FormulaContext context, + final ParserConfiguration parserConfiguration) throws ParsingException { + return parseSyntaxFragment(input, parser -> parser.term(context), "term", parserConfiguration); + } + + public static Term parseTerm(final String input, final ParserConfiguration parserConfiguration) + throws ParsingException { + return parseTerm(input, FormulaContext.HEAD, parserConfiguration); + } + + public static Term parseTerm(final String input, final FormulaContext context) throws ParsingException { + return parseTerm(input, context, null); + } + + public static Term parseTerm(final String input) throws ParsingException { + return parseTerm(input, (ParserConfiguration) null); + } + + public static DataSource parseDataSourceDeclaration(final String input, ParserConfiguration parserConfiguration) + throws ParsingException { + return parseSyntaxFragment(input, RuleParser::parseAndExtractDatasourceDeclaration, "data source declaration", + parserConfiguration); + } + + public static DataSource parseDataSourceDeclaration(final String input) throws ParsingException { + return parseDataSourceDeclaration(input, null); + } + static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException { try { parser.parse(); @@ -200,4 +233,19 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException return parser.getKnowledgeBase(); } + protected static DataSource parseAndExtractDatasourceDeclaration(final JavaCCParser parser) + throws ParsingException, ParseException, PrefixDeclarationException { + parser.source(); + + final List dataSourceDeclarations = parser.getKnowledgeBase() + .getDataSourceDeclarations(); + + if (dataSourceDeclarations.size() != 1) { + throw new ParsingException( + "Unexpected number of data source declarations: " + dataSourceDeclarations.size()); + } + + return dataSourceDeclarations.get(0).getDataSource(); + } + } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index aee340a60..ff6029184 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -27,17 +27,11 @@ import java.io.IOException; import java.net.MalformedURLException; import 
java.net.URL; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.junit.Test; import org.mockito.ArgumentMatchers; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -51,50 +45,41 @@ public class RuleParserDataSourceTest { @Test public void testCsvSource() throws ParsingException, IOException { String input = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); - Predicate p = Expressions.makePredicate("p", 2); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, csvds); - assertEquals(Arrays.asList(d), statements); + assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input)); } @Test public void testRdfSource() throws ParsingException, IOException { String input = "@source p(3) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); - Predicate p = Expressions.makePredicate("p", 3); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, rdfds); - assertEquals(Arrays.asList(d), statements); + assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input)); } @Test(expected = ParsingException.class) public void testRdfSourceInvalidArity() throws ParsingException, IOException { String input = "@source p(2) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - RuleParser.parse(input); + RuleParser.parseDataSourceDeclaration(input); } @Test public void testSparqlSource() throws ParsingException, MalformedURLException { String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); - Predicate p = Expressions.makePredicate("p", 2); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, sparqlds); - assertEquals(Arrays.asList(d), statements); + assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input)); } @Test(expected = ParsingException.class) public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - RuleParser.parse(input); + RuleParser.parseDataSourceDeclaration(input); } @Test(expected = ParsingException.class) public void testUnknownDataSource() throws ParsingException { String input = "@source p(2) : unknown-data-source(\"hello, world\") ."; - RuleParser.parse(input); + RuleParser.parseDataSourceDeclaration(input); } @Test @@ -108,7 +93,7 @@ public void testCustomDataSource() throws ParsingException { String input = "@source p(2) : 
mock-source(\"hello\", \"world\") ."; List expectedArguments = Arrays.asList("hello", "world"); - RuleParser.parse(input, parserConfiguration); + RuleParser.parseDataSourceDeclaration(input, parserConfiguration); verify(handler).handleDeclaration(eq(expectedArguments), ArgumentMatchers.any()); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 11db42fd6..58bab4a65 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -186,11 +186,10 @@ public void testNoDollarVariables() throws ParsingException { @Test public void testIntegerLiteral() throws ParsingException { - String input = "p(42) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(42)"; PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("42", PrefixDeclarations.XSD_INTEGER)); - assertEquals(Arrays.asList(integerLiteral), statements); + assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @Test @@ -204,90 +203,81 @@ public void testAbbreviatedIntegerLiteral() throws ParsingException { @Test public void testFullIntegerLiteral() throws ParsingException { - String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> ) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> )"; PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("42", PrefixDeclarations.XSD_INTEGER)); - assertEquals(Arrays.asList(integerLiteral), statements); + assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @Test public void testDecimalLiteral() throws ParsingException { - String input = "p(-5.0) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(-5.0)"; PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("-5.0", PrefixDeclarations.XSD_DECIMAL)); - assertEquals(Arrays.asList(decimalLiteral), statements); + assertEquals(decimalLiteral, RuleParser.parseLiteral(input)); } @Test public void testDoubleLiteral() throws ParsingException { - String input = "p(4.2E9) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(4.2E9)"; PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("4.2E9", PrefixDeclarations.XSD_DOUBLE)); - assertEquals(Arrays.asList(doubleLiteral), statements); + assertEquals(doubleLiteral, RuleParser.parseLiteral(input)); } @Test public void testStringLiteral() throws ParsingException { - String input = "p(\"abc\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact2), statements); + String input = "p(\"abc\")"; + assertEquals(fact2, RuleParser.parseLiteral(input)); } @Test(expected = ParsingException.class) public void testIncompleteStringLiteral() throws ParsingException { - String input = "p(\"abc) ."; - RuleParser.parse(input); + String input = "p(\"abc)"; + RuleParser.parseLiteral(input); } @Test public void testStringLiteralEscapes() throws ParsingException { - String input = "p(\"_\\\"_\\\\_\\n_\\t_\") ."; // User input: 
p("_\"_\\_\n_\t_") - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); - assertEquals(Arrays.asList(fact), statements); + assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void testStringLiteralAllEscapes() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") - String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarations.XSD_STRING)); - assertEquals(Arrays.asList(fact), statements); + assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void testStringLiteralMultiLine() throws ParsingException { - String input = "p('''line 1\n\n" + "line 2\n" + "line 3''') ."; // User input: p("a\"b\\c") - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); - assertEquals(Arrays.asList(fact), statements); + assertEquals(fact, RuleParser.parseLiteral(input)); } @Test(expected = ParsingException.class) public void testIncompleteStringLiteralMultiLine() throws ParsingException { - String input = "p('''abc\ndef'') ."; - RuleParser.parse(input); + String input = "p('''abc\ndef'')"; + RuleParser.parseLiteral(input); } @Test public void testFullLiteral() throws ParsingException { - String input = "p(\"abc\"^^) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact2), statements); + String input = "p(\"abc\"^^)"; + assertEquals(fact2, RuleParser.parseLiteral(input)); } @Test public void testUnicodeLiteral() throws ParsingException { - String input = "p(\"\\u0061\\u0062\\u0063\") ."; // "abc" - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact2), statements); + String input = "p(\"\\u0061\\u0062\\u0063\")"; // "abc" + assertEquals(fact2, RuleParser.parseLiteral(input)); } @Test @@ -306,11 +296,10 @@ public void testPrefixedLiteral() throws ParsingException { @Test public void testLangStringLiteral() throws ParsingException { - String input = "p(\"abc\"@en-gb) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(\"abc\"@en-gb)"; PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeLanguageStringConstant("abc", "en-gb")); - assertEquals(Arrays.asList(fact), statements); + assertEquals(fact, RuleParser.parseLiteral(input)); } @Test @@ -373,8 +362,8 @@ public void testInvalidDatatypeOnLiteral() throws ParsingException { @Test(expected = ParsingException.class) public void testNonIriTypeInDatatypeLiteral() throws ParsingException { - final String input = "P(\"a\"^^whatever)"; - RuleParser.parseLiteral(input); + final String input = "\"a\"^^whatever"; + RuleParser.parseTerm(input); } @Test From 14f12cc8a50380223ebd34677c8a2de7bb13d511 Mon Sep 17 00:00:00 2001 From: 
Maximilian Marx Date: Wed, 4 Dec 2019 15:02:20 +0100 Subject: [PATCH 0432/1003] Require dots at the end of facts --- .../vlog4j/parser/javacc/JavaCCParser.jj | 4 ++-- .../vlog4j/syntax/parser/EntityTest.java | 8 ++++---- .../syntax/parser/RuleParserParseFactTest.java | 14 +++++++------- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 8593d2421..3881f4bdf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -138,7 +138,7 @@ void statement() throws PrefixDeclarationException: } { LOOKAHEAD(rule()) statement = rule() { knowledgeBase.addStatement(statement);} -| statement = fact(FormulaContext.HEAD) < DOT > //not from a rule +| statement = fact(FormulaContext.HEAD) //not from a rule { knowledgeBase.addStatement(statement); } @@ -217,7 +217,7 @@ Fact fact(FormulaContext context) throws PrefixDeclarationException: String predicateName; } { - predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > < DOT > { try { return Expressions.makeFact(predicateName, terms); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 8bd7cb008..c2fb1cfaa 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -139,10 +139,10 @@ public void predicateRoundTripTest3() throws ParsingException { @Test public void iriAngularBracketsTest() throws ParsingException { String constant = "a"; - Fact fact = RuleParser.parseFact("p(" + constant + ")"); + Fact fact = RuleParser.parseFact("p(" + constant + ")."); Term abstractConst = fact.getArguments().get(0); assertEquals(constant, abstractConst.toString()); - Fact fact2 = RuleParser.parseFact("p(<" + constant + ">)"); + Fact fact2 = RuleParser.parseFact("p(<" + constant + ">)."); Term abstractConst2 = fact2.getArguments().get(0); assertEquals(abstractConst, abstractConst2); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java index d6f423ee0..876b01ae8 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -40,28 +40,28 @@ public class RuleParserParseFactTest { @Test public void testFactArityOne() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\")"), factA); + assertEquals(RuleParser.parseFact("p(\"a\") ."), factA); } @Test public void testFactArityOneWithDataType() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\")"), factA); + assertEquals(RuleParser.parseFact("p(\"a\") ."), factA); } @Test public void testFactArityTwo() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\",\"b\")"), factAB); + assertEquals(RuleParser.parseFact("p(\"a\",\"b\") ."), factAB); } @Test(expected = ParsingException.class) public void testFactWithVariable() throws ParsingException { - String input = "p(?X)"; + String input = "p(?X) ."; RuleParser.parseFact(input); } @Test(expected = ParsingException.class) public void testZeroArityFact() throws ParsingException { - String input = "p()"; + String input = "p() ."; RuleParser.parseFact(input); } From e07bf1d7271ae72db09b21f7bb53ea12d03b7338 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 15:40:02 +0100 Subject: [PATCH 0433/1003] Parser: Use brackets in data source declarations for arity Implements #134. --- RELEASE-NOTES.md | 12 +- .../core/model/implementation/Serializer.java | 64 +++---- .../core/model/DataSourceDeclarationTest.java | 12 +- .../vlog4j/core/model/PredicateImplTest.java | 164 +++++++++--------- .../main/data/input/counting-triangles.rls | 3 +- vlog4j-examples/src/main/data/input/doid.rls | 15 +- .../examples/CompareWikidataDBpedia.java | 12 +- .../examples/core/AddDataFromCsvFile.java | 6 +- .../examples/core/AddDataFromRdfFile.java | 2 +- .../vlog4j/parser/javacc/JavaCCParser.jj | 4 +- .../vlog4j/syntax/parser/EntityTest.java | 44 ----- .../parser/RuleParserDataSourceTest.java | 60 +++++-- 12 files changed, 198 insertions(+), 200 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 0819972b3..2e99a8640 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -10,6 +10,7 @@ Breaking changes: * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes * Methods to access terms now use Java Streams and are unified across syntactic objects +* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()` New features: * New module vlog4j-client provides a stand-alone command line client jar for VLog4j @@ -31,17 +32,17 @@ VLog4j v0.4.0 ------------- Breaking changes: -* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) +* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) * The EdbIdbSeparation is obsolete and does no longer exist * IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier * A new interface Fact has replaced the overly general PositiveLiteral in many places New features: * New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java -* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB 
distinction) +* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction) * New InMemoryDataSource for efficient in-memory fact loading * New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner -* Modifications to the knowledge base are taken into account by the reasoner +* Modifications to the knowledge base are taken into account by the reasoner * New and updated example programs to illustrate use of syntax Other improvements: @@ -52,7 +53,7 @@ Other improvements: * Better code structure and testing Bugfixes: -* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now VLog4j v0.3.0 @@ -68,7 +69,7 @@ VLog4j v0.2.0 ------------- New features: -* supporting File data sources of N-Triples format (.nt file extension) +* supporting File data sources of N-Triples format (.nt file extension) * supporting g-zipped data source files (.csv.gz, .nt.gz) VLog4j v0.1.0 @@ -80,4 +81,3 @@ New features: * Essential data models for rules and facts, and essential reasoner functionality * support for reading from RDF files * support for converting rules from OWL ontology, loaded with the OWL API - diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index ce1d58990..7726b506f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -44,7 +44,7 @@ /** * A utility class with static methods to obtain the correct parsable string * representation of the different data models. - * + * * @author Ali Elhalawati * */ @@ -55,8 +55,10 @@ public final class Serializer { public static final String EXISTENTIAL_IDENTIFIER = "!"; public static final String UNIVERSAL_IDENTIFIER = "?"; public static final String NAMEDNULL_IDENTIFIER = "_"; - public static final String OPEN_PARENTHESIS = "("; + public static final String OPENING_PARENTHESIS = "("; public static final String CLOSING_PARENTHESIS = ")"; + public static final String OPENING_BRACKET = "["; + public static final String CLOSING_BRACKET = "]"; public static final String RULE_SEPARATOR = " :- "; public static final String AT = "@"; public static final String DATA_SOURCE = "@source "; @@ -85,11 +87,11 @@ private Serializer() { /** * Creates a String representation of a given {@link Rule}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. 
- * + * */ public static String getString(final Rule rule) { return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; @@ -97,7 +99,7 @@ public static String getString(final Rule rule) { /** * Creates a String representation of a given {@link Conjunction}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. @@ -118,7 +120,7 @@ public static String getString(final Conjunction conjunction) /** * Creates a String representation of a given {@link Literal}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. @@ -128,7 +130,7 @@ public static String getString(final Literal literal) { if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); } - stringBuilder.append(getIRIString(literal.getPredicate().getName())).append(OPEN_PARENTHESIS); + stringBuilder.append(getIRIString(literal.getPredicate().getName())).append(OPENING_PARENTHESIS); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { @@ -145,7 +147,7 @@ public static String getString(final Literal literal) { /** * Creates a String representation of a given {@link Fact}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. @@ -156,7 +158,7 @@ public static String getFactString(final Fact fact) { /** * Creates a String representation of a given {@link Constant}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param constant a {@link Constant} * @return String representation corresponding to a given {@link Constant}. @@ -168,7 +170,7 @@ public static String getString(final AbstractConstant constant) { /** * Creates a String representation corresponding to the name of a given * {@link LanguageStringConstant}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given @@ -181,7 +183,7 @@ public static String getConstantName(final LanguageStringConstant languageString /** * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} without an IRI. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given @@ -204,7 +206,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { /** * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given @@ -217,7 +219,7 @@ public static String getConstantName(final DatatypeConstant datatypeConstant) { /** * Creates a String representation of a given {@link ExistentialVariable}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. 
* @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given @@ -229,7 +231,7 @@ public static String getString(final ExistentialVariable existentialVariable) { /** * Creates a String representation of a given {@link UniversalVariable}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given @@ -241,7 +243,7 @@ public static String getString(final UniversalVariable universalVariable) { /** * Creates a String representation of a given {@link NamedNull}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. @@ -252,18 +254,18 @@ public static String getString(final NamedNull namedNull) { /** * Creates a String representation of a given {@link Predicate}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(final Predicate predicate) { - return predicate.getName() + OPEN_PARENTHESIS + predicate.getArity() + CLOSING_PARENTHESIS; + return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; } /** * Creates a String representation of a given {@link DataSourceDeclaration}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given @@ -276,44 +278,44 @@ public static String getString(final DataSourceDeclaration dataSourceDeclaration /** * Creates a String representation of a given {@link CsvFileDataSource}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki">. - * + * * @param csvFileDataSource * @return String representation corresponding to a given * {@link CsvFileDataSource}. */ public static String getString(final CsvFileDataSource csvFileDataSource) { - return CSV_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; + return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; } /** * Creates a String representation of a given {@link RdfFileDataSource}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki">. - * - * + * + * * @param rdfFileDataSource * @return String representation corresponding to a given * {@link RdfFileDataSource}. */ public static String getString(final RdfFileDataSource rdfFileDataSource) { - return RDF_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; + return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; } /** * Creates a String representation of a given * {@link SparqlQueryResultDataSource}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki">. - * - * + * + * * @param dataSource * @return String representation corresponding to a given * {@link SparqlQueryResultDataSource}. 
*/ public static String getString(final SparqlQueryResultDataSource dataSource) { - return SPARQL_QUERY_RESULT_DATA_SOURCE + OPEN_PARENTHESIS + return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) + CLOSING_PARENTHESIS; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index caf805b82..c3ebcd4fb 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -75,7 +75,7 @@ public void toString_SparqlQueryResultDataSource() throws IOException { new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 ."); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); - assertEquals("@source p(3): sparql(, \"var\", \"?var wdt:P31 wd:Q5 .\") .", + assertEquals("@source p[3]: sparql(, \"var\", \"?var wdt:P31 wd:Q5 .\") .", dataSourceDeclaration.toString()); } @@ -91,7 +91,7 @@ public void toString_CsvFileDataSource() throws IOException { unzippedCsvFileDataSource); final String expectedFilePath = relativeDirName + File.separator + fileName; - assertEquals("@source q(1): load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); + assertEquals("@source q[1]: load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); } // FIXME: have String representation of files OS independent @@ -103,7 +103,7 @@ public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throw final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(absoluteFilePathWindows)); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedCsvFileDataSource); - assertEquals("@source q(1): load-csv(\"D:/input/file.csv\") .", dataSourceDeclaration.toString()); + assertEquals("@source q[1]: load-csv(\"D:/input/file.csv\") .", dataSourceDeclaration.toString()); } @Test @@ -117,6 +117,6 @@ public void toString_RdfFileDataSource_relativePath() throws IOException { unzippedRdfFileDataSource); final String expectedFilePath = relativeDirName + File.separator + fileName; - assertEquals("@source q(1): load-rdf(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); + assertEquals("@source q[1]: load-rdf(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java index 0c750ebb1..508f4c90d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java @@ 
-1,82 +1,82 @@ -package org.semanticweb.vlog4j.core.model; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; - -public class PredicateImplTest { - - @Test - public void testEquals() { - final Predicate p1 = new PredicateImpl("p", 1); - final Predicate p1too = Expressions.makePredicate("p", 1); - final Predicate p2 = new PredicateImpl("p", 2); - final Predicate q1 = new PredicateImpl("q", 1); - - assertEquals(p1, p1); - assertEquals(p1too, p1); - assertNotEquals(p2, p1); - assertNotEquals(q1, p1); - assertNotEquals(p2.hashCode(), p1.hashCode()); - assertNotEquals(q1.hashCode(), p1.hashCode()); - assertFalse(p1.equals(null)); // written like this for recording coverage properly - assertFalse(p1.equals("p")); // written like this for recording coverage properly - } - - @Test(expected = NullPointerException.class) - public void predicateNameNotNull() { - new PredicateImpl(null, 2); - } - - @Test(expected = IllegalArgumentException.class) - public void predicateNameNotEmpty() { - new PredicateImpl("", 1); - } - - @Test(expected = IllegalArgumentException.class) - public void predicateNameNotWhitespace() { - new PredicateImpl(" ", 1); - } - - @Test(expected = IllegalArgumentException.class) - public void arityNegative() { - new PredicateImpl("p", -1); - } - - @Test(expected = IllegalArgumentException.class) - public void arityZero() { - new PredicateImpl("p", 0); - } - - @Test - public void predicateToStringTest() { - final Predicate p1 = new PredicateImpl("p", 1); - assertEquals("p(1)", p1.toString()); - } - -} +package org.semanticweb.vlog4j.core.model; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; + +public class PredicateImplTest { + + @Test + public void testEquals() { + final Predicate p1 = new PredicateImpl("p", 1); + final Predicate p1too = Expressions.makePredicate("p", 1); + final Predicate p2 = new PredicateImpl("p", 2); + final Predicate q1 = new PredicateImpl("q", 1); + + assertEquals(p1, p1); + assertEquals(p1too, p1); + assertNotEquals(p2, p1); + assertNotEquals(q1, p1); + assertNotEquals(p2.hashCode(), p1.hashCode()); + assertNotEquals(q1.hashCode(), p1.hashCode()); + assertFalse(p1.equals(null)); // written like this for recording coverage properly + assertFalse(p1.equals("p")); // written like this for recording coverage properly + } + + @Test(expected = NullPointerException.class) + public void predicateNameNotNull() { + new PredicateImpl(null, 2); + } + + @Test(expected = IllegalArgumentException.class) + public void predicateNameNotEmpty() { + new PredicateImpl("", 1); + } + + @Test(expected = IllegalArgumentException.class) + public void predicateNameNotWhitespace() { + new PredicateImpl(" ", 1); + } + + @Test(expected = IllegalArgumentException.class) + public void arityNegative() { + new PredicateImpl("p", -1); + } + + @Test(expected = IllegalArgumentException.class) + public void arityZero() { + new PredicateImpl("p", 0); + } + + @Test + public void predicateToStringTest() { + final Predicate p1 = new PredicateImpl("p", 1); + assertEquals("p[1]", p1.toString()); + } + +} diff --git a/vlog4j-examples/src/main/data/input/counting-triangles.rls b/vlog4j-examples/src/main/data/input/counting-triangles.rls index 223c4a77a..90cf145bb 100644 --- a/vlog4j-examples/src/main/data/input/counting-triangles.rls +++ b/vlog4j-examples/src/main/data/input/counting-triangles.rls @@ -2,7 +2,7 @@ % From Wikidata, get all countries (items with P31 relation to Q6256, or subclasses thereof: P279*) % that border (P47) each other: -@source borders(2): sparql(wdqs:sparql, "country1,country2", +@source borders[2]: sparql(wdqs:sparql, "country1,country2", '''?country1 wdt:P31/wdt:P279* wd:Q6256 . ?country2 wdt:P31/wdt:P279* wd:Q6256 . ?country1 wdt:P47 ?country2 .''') . @@ -15,4 +15,3 @@ country(?X) :- shareBorder(?X, ?Y) . % Compute all triangles: triangle(?X,?Y,?Z) :- shareBorder(?X,?Y), shareBorder(?Y,?Z), shareBorder(?Z,?X) . - diff --git a/vlog4j-examples/src/main/data/input/doid.rls b/vlog4j-examples/src/main/data/input/doid.rls index 545febc5e..e50e4e3ef 100644 --- a/vlog4j-examples/src/main/data/input/doid.rls +++ b/vlog4j-examples/src/main/data/input/doid.rls @@ -1,24 +1,24 @@ @prefix rdfs: . @prefix wdqs: . -@source doidTriple(3): load-rdf("src/main/data/input/doid.nt.gz") . -@source diseaseId(2): sparql(wdqs:sparql, "disease,doid", "?disease wdt:P699 ?doid .") . -@source recentDeaths(1): sparql(wdqs:sparql, "human", +@source doidTriple[3]: load-rdf("src/main/data/input/doid.nt.gz") . +@source diseaseId[2]: sparql(wdqs:sparql, "disease,doid", "?disease wdt:P699 ?doid .") . +@source recentDeaths[1]: sparql(wdqs:sparql, "human", '''?human wdt:P31 wd:Q5; wdt:P570 ?deathDate . FILTER (YEAR(?deathDate) = 2018)''') . 
-@source recentDeathsCause(2): sparql(wdqs:sparql, "human,causeOfDeath", +@source recentDeathsCause[2]: sparql(wdqs:sparql, "human,causeOfDeath", '''?human wdt:P31 wd:Q5; wdt:P570 ?deathDate ; - wdt:P509 ?causeOfDeath . + wdt:P509 ?causeOfDeath . FILTER (YEAR(?deathDate) = 2018)''') . % Combine recent death data (infer "unknown" cause if no cause given): deathCause(?X, ?Z) :- recentDeathsCause(?X, ?Z) . deathCause(?X, !Z) :- recentDeaths(?X) . -% Mark Wikidata diseases that have a DOID: -hasDoid(?X) :- diseaseId(?X, ?DoidId) . +% Mark Wikidata diseases that have a DOID: +hasDoid(?X) :- diseaseId(?X, ?DoidId) . % Relate DOID string ID (used on Wikidata) to DOID IRI (used in DOID ontology) doid(?Iri, ?DoidId) :- doidTriple(?Iri, ,?DoidId) . @@ -34,4 +34,3 @@ cancerDisease(?Xdoid) :- diseaseHierarchy(?X, ?Y), doid(?Y, "DOID:162"), doid(?X humansWhoDiedOfCancer(?X) :- deathCause(?X, ?Y), diseaseId(?Y, ?Z), cancerDisease(?Z) . humansWhoDiedOfNoncancer(?X) :- deathCause(?X, ?Y), diseaseId(?Y, ?Z), ~cancerDisease(?Z) . humansWhoDiedOfNoncancer(?X) :- deathCause(?X, ?Y), ~hasDoid(?Y) . - diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 7e1031f42..f3b3129c2 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -36,10 +36,10 @@ * over. For a fair comparison, we restrict to Wikidata entities that have a * related English Wikipedia page (others cannot be in English DBpedia in the * first place). - * + * * The example query used asks for alumni of the University of Leipzig (one of * the oldest European universities). - * + * * @author Markus Kroetzsch * */ @@ -72,8 +72,8 @@ public static void main(final String[] args) throws ParsingException, IOExceptio final String rules = "" // + "@prefix wdqs: ." // + "@prefix dbp: ." // - + "@source dbpResult(2) : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // - + "@source wdResult(2) : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + + "@source dbpResult[2] : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // + + "@source wdResult[2] : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + "% Rules:\n" // + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage)." // + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage)."
// diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index b68db527b..d493a3eb5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -56,11 +56,11 @@ public static void main(final String[] args) throws IOException, ParsingExceptio ExamplesUtils.configureLogging(); final String initialFactsHasPart = ""// a file input: - + "@source hasPart(2) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ."; + + "@source hasPart[2] : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ."; final String rules = "" // first declare file inputs: - + "@source bicycle(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz\") ." - + "@source wheel(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz\") ." + + "@source bicycle[1] : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz\") ." + + "@source wheel[1] : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz\") ." // every bicycle has some part that is a wheel: + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." // every wheel is part of some bicycle: diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index a00a008d1..3a2cd3f8f 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -67,7 +67,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio + "@prefix ex: ." + "@prefix rdf: ." // specify data sources: - + "@source triple(3) : load-rdf(\"" + ExamplesUtils.INPUT_FOLDER + "ternaryBicycleEDB.nt.gz\") ." + + "@source triple[3] : load-rdf(\"" + ExamplesUtils.INPUT_FOLDER + "ternaryBicycleEDB.nt.gz\") ." // every bicycle has some part that is a wheel: + "triple(?S, ex:hasPart, !X), triple(!X, rdf:type, ex:wheel) :- triple(?S, rdf:type, ex:bicycle) ." // every wheel is part of some bicycle: diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 3881f4bdf..89f542b82 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -103,7 +103,7 @@ void source() throws PrefixDeclarationException: Token arity; } { - < SOURCE > predicateName = predicateName() < LPAREN > arity = < INTEGER > < RPAREN > < COLON > dataSource = dataSource() < DOT > + < SOURCE > predicateName = predicateName() < LBRACK > arity = < INTEGER > < RBRACK > < COLON > dataSource = dataSource() < DOT > { int nArity; nArity = Integer.parseInt(arity.image); @@ -518,6 +518,8 @@ TOKEN : { < LPAREN : "(" > | < RPAREN : ")" > +| < LBRACK : "[" > +| < RBRACK : "]" > | < COMMA : "," > | < DOT : "." 
> | < ARROW : ":-" > diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index c2fb1cfaa..3ca6b90d2 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -21,14 +21,9 @@ */ import static org.junit.Assert.assertEquals; -import java.io.File; -import java.io.IOException; -import java.net.URL; - import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; @@ -38,14 +33,9 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -239,38 +229,4 @@ public void datatypeDecimalToStringRoundTripTest() throws ParsingException { assertEquals(shortDecimalConstant, RuleParser.parseFact("p(" + shortDecimalConstant + ").").getArguments().get(0).toString()); } - - @Test - public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { - KnowledgeBase kb = new KnowledgeBase(); - Predicate predicate1 = Expressions.makePredicate("p", 3); - SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/sparql"), - "var", "?var wdt:P31 wd:Q5 ."); - DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); - RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); - assertEquals(dataSourceDeclaration1, kb.getDataSourceDeclarations().get(0)); - } - - @Test - public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { - KnowledgeBase kb = new KnowledgeBase(); - Predicate predicate1 = Expressions.makePredicate("p", 3); - RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File("src/test/data/input/file.nt")); - DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, - unzippedRdfFileDataSource); - RuleParser.parseInto(kb, dataSourceDeclaration.toString()); - assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); - } - - @Test - public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { - KnowledgeBase kb = new KnowledgeBase(); - Predicate predicate1 = Expressions.makePredicate("q", 1); - CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new 
File("src/test/data/input/file.csv")); - final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, - unzippedCsvFileDataSource); - RuleParser.parseInto(kb, dataSourceDeclaration.toString()); - assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); - } - } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index ff6029184..16011fa0f 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -19,9 +19,11 @@ * limitations under the License. * #L% */ - -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import java.io.File; import java.io.IOException; @@ -32,6 +34,11 @@ import org.junit.Test; import org.mockito.ArgumentMatchers; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -44,27 +51,27 @@ public class RuleParserDataSourceTest { @Test public void testCsvSource() throws ParsingException, IOException { - String input = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") ."; + String input = "@source p[2] : load-csv(\"src/main/data/input/example.csv\") ."; CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input)); } @Test public void testRdfSource() throws ParsingException, IOException { - String input = "@source p(3) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + String input = "@source p[3] : load-rdf(\"src/main/data/input/example.nt.gz\") ."; RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input)); } @Test(expected = ParsingException.class) public void testRdfSourceInvalidArity() throws ParsingException, IOException { - String input = "@source p(2) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + String input = "@source p[2] : load-rdf(\"src/main/data/input/example.nt.gz\") ."; RuleParser.parseDataSourceDeclaration(input); } @Test public void testSparqlSource() throws ParsingException, MalformedURLException { - String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input)); @@ -72,13 
+79,13 @@ public void testSparqlSource() throws ParsingException, MalformedURLException { @Test(expected = ParsingException.class) public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { - String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; RuleParser.parseDataSourceDeclaration(input); } @Test(expected = ParsingException.class) public void testUnknownDataSource() throws ParsingException { - String input = "@source p(2) : unknown-data-source(\"hello, world\") ."; + String input = "@source p[2] : unknown-data-source(\"hello, world\") ."; RuleParser.parseDataSourceDeclaration(input); } @@ -91,10 +98,43 @@ public void testCustomDataSource() throws ParsingException { doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.>any(), ArgumentMatchers.any()); - String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; + String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; List expectedArguments = Arrays.asList("hello", "world"); RuleParser.parseDataSourceDeclaration(input, parserConfiguration); verify(handler).handleDeclaration(eq(expectedArguments), ArgumentMatchers.any()); } + + @Test + public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + Predicate predicate1 = Expressions.makePredicate("p", 3); + SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/sparql"), + "var", "?var wdt:P31 wd:Q5 ."); + DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); + RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); + assertEquals(dataSourceDeclaration1, kb.getDataSourceDeclarations().get(0)); + } + + @Test + public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + Predicate predicate1 = Expressions.makePredicate("p", 3); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File("src/test/data/input/file.nt")); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, + unzippedRdfFileDataSource); + RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); + } + + @Test + public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + Predicate predicate1 = Expressions.makePredicate("q", 1); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File("src/test/data/input/file.csv")); + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, + unzippedCsvFileDataSource); + RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); + } } From e4fcf2504eb1de93f7d887ba2a22b3d780b2d943 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 15:54:28 +0100 Subject: [PATCH 0434/1003] Use openjdk8 on xenial --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index fde2269c2..a5985bb38 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,6 +17,8 @@ matrix: - g++-6 - libstdc++6 env: CC=gcc-6 CXX=g++-6 + jdk: + - openjdk8 - os: osx osx_image: xcode10.2 
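Taken together, the two parser patches above (0432 and 0433) change the accepted input syntax: facts must now be terminated by a dot, and data source declarations state the predicate arity in square brackets instead of parentheses. The following is a minimal sketch of how the RuleParser API exercised in the tests above would be used with the new syntax; the CSV file name, predicate names, and rule are illustrative placeholders rather than part of any patch.

```java
import org.semanticweb.vlog4j.core.model.api.Fact;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.RuleParser;

public class UpdatedSyntaxSketch {
	public static void main(String[] args) throws ParsingException {
		// Since patch 0432, facts are only accepted with a terminating dot:
		Fact fact = RuleParser.parseFact("p(\"a\") .");

		// Since patch 0433, data source declarations give the arity in brackets:
		KnowledgeBase kb = RuleParser.parse(
				"@source hasPart[2] : load-csv(\"hasPartEDB.csv.gz\") ." // placeholder file name
						+ " part(?Y) :- hasPart(?X, ?Y) .");

		System.out.println(fact);
		System.out.println(kb.getDataSourceDeclarations());
	}
}
```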
From 7f02a16c5c9774716070cb98aac473b8d56c8f56 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 16:13:42 +0100 Subject: [PATCH 0435/1003] Submit coverage only for the main build --- .travis.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index a5985bb38..908b769d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,6 +5,8 @@ matrix: dist: bionic jdk: - openjdk11 + after_success: + - mvn clean test jacoco:report coveralls:report - os: linux dist: xenial @@ -35,9 +37,6 @@ jobs: install: mvn install $OPTIONS -DskipTests=true -after_success: - - mvn clean test jacoco:report coveralls:report - sudo: false cache: From 315e3fd800ed69c296cb4764004cf1f80eff1bb1 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 16:37:04 +0100 Subject: [PATCH 0436/1003] Add note on OS compatibility --- README.md | 8 +++++--- RELEASE-NOTES.md | 2 ++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 19d2290a2..e11ad17f7 100644 --- a/README.md +++ b/README.md @@ -28,11 +28,13 @@ You need to use Java 1.8 or above. Available modules include: * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API * **vlog4j-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/vlog4j/wiki/Standalone-client) for VLog4j. -The released packages use vlog4j-base, which packages system-dependent binaries for Linux, MacOS, and Windows, and should work out of the box with current versions of these systems. In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use vlog4j-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * Run [build-vlog-library.sh](https://github.com/knowsys/vlog4j/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./vlog4j-core/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog4j-base. * Run ```mvn install``` to test if the setup works + + Documentation ------------- @@ -47,5 +49,5 @@ Development * Pull requests are welcome. * The master branch may require a development version of VLog. Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). -* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. -* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. +* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". 
This step needs to be repeated when the file changes. +* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. \ No newline at end of file diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 2e99a8640..d3c2ed11d 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -15,6 +15,7 @@ Breaking changes: New features: * New module vlog4j-client provides a stand-alone command line client jar for VLog4j * A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki +* The parser behaviour for data source declarations and certain datatype literals can be customised. Other improvements: * Data model is better aligned with syntax supported by parser @@ -27,6 +28,7 @@ Bugfixes: * Acyclicity checks work again without calling reason() first (issue #128) * in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) * in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) +* When parsing syntactic fragment such as Facts or Literals, the parser now enforces that all input is consumed. VLog4j v0.4.0 ------------- From 609d70a332024243c31e3cc29213814f47a72200 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 19:58:34 +0100 Subject: [PATCH 0437/1003] Parser: Address review comments --- vlog4j-parser/pom.xml | 182 +++++++++--------- .../parser/DataSourceDeclarationHandler.java | 2 +- .../parser/DatatypeConstantHandler.java | 4 +- .../vlog4j/parser/ParserConfiguration.java | 37 ++-- .../semanticweb/vlog4j/parser/RuleParser.java | 25 +-- .../CsvFileDataSourceDeclarationHandler.java | 2 +- .../RdfFileDataSourceDeclarationHandler.java | 2 +- ...eryResultDataSourceDeclarationHandler.java | 13 +- .../parser/javacc/JavaCCParserBase.java | 21 +- .../parser/RuleParserDataSourceTest.java | 24 ++- 10 files changed, 158 insertions(+), 154 deletions(-) diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index 6e3773633..f05d6ef5a 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -24,95 +24,97 @@ vlog4j-core ${project.version} - + - - - - org.codehaus.mojo - javacc-maven-plugin - 2.6 - - - ruleparser - - ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/javacc/ - - - javacc - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.8 - - - generate-sources - - - ${project.build.directory}/generated-sources/javacc/ - - - - add-source - - - - - - - - - - org.eclipse.m2e - lifecycle-mapping - 1.0.0 - - - - - - org.codehaus.mojo - javacc-maven-plugin - [2.6,) - - javacc - - - - - - - - - org.codehaus.mojo - build-helper-maven-plugin - [1.0,) - - parse-version - add-source - maven-version - add-resource - add-test-resource - add-test-source - - - - - true - true - - - - - - - - - - + + + + org.codehaus.mojo + javacc-maven-plugin + 2.6 + + + ruleparser + + ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/javacc/ + + + javacc + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 1.8 + + + generate-sources + + + ${project.build.directory}/generated-sources/javacc/ + + + + add-source + + + + + + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + org.codehaus.mojo + javacc-maven-plugin + [2.6,) + + javacc + + + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + [1.0,) + + parse-version + add-source + 
maven-version + add-resource + add-test-resource + add-test-source + + + + + true + true + + + + + + + + + + diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index d1f8766b1..c191ffa76 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -55,7 +55,7 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto * @throws ParsingException when the given number of Arguments is invalid for * the Data Source. */ - static void verifyCorrectNumberOfArguments(List arguments, int number) throws ParsingException { + static void validateNumberOfArguments(List arguments, int number) throws ParsingException { if (arguments.size() != number) { throw new ParsingException("Invalid number of arguments " + arguments.size() + " for Data Source declaration, expected " + number); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java index 7f8ffc3e7..c584b876a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java @@ -23,14 +23,14 @@ import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; /** - * Handler for parsing a custom Data Source declaration. + * Handler for parsing a custom Datatype constant. * * @author Maximilian Marx */ @FunctionalInterface public interface DatatypeConstantHandler { /** - * Parse a Data Source Declaration. + * Parse a datatype constant. * * @param lexicalForm lexical representation of the constant. * diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 1aec212b0..4d75fae0a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -23,6 +23,7 @@ import java.util.HashMap; import java.util.List; +import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; @@ -35,6 +36,16 @@ * @author Maximilian Marx */ public class ParserConfiguration { + /** + * The registered data sources. + */ + private HashMap dataSources = new HashMap<>(); + + /** + * The registered datatypes. + */ + private HashMap datatypes = new HashMap<>(); + /** * Register a new Data Source. 
* @@ -47,9 +58,7 @@ public class ParserConfiguration { */ public ParserConfiguration registerDataSource(String name, DataSourceDeclarationHandler handler) throws IllegalArgumentException { - if (dataSources.containsKey(name)) { - throw new IllegalArgumentException("Data source \"" + name + "\" is already registered."); - } + Validate.isTrue(!dataSources.containsKey(name), "The Data Source \"%s\" is already registered.", name); this.dataSources.put(name, handler); return this; @@ -96,10 +105,10 @@ public DataSource parseDataSourceDeclaration(String name, List args, */ public Constant parseConstant(String lexicalForm, String languageTag, String datatype) throws ParsingException, IllegalArgumentException { - if (languageTag != null && datatype != null) { - throw new IllegalArgumentException( - "A constant with a language tag may not explicitly specify a data type."); - } else if (languageTag != null) { + Validate.isTrue(languageTag == null || datatype == null, + "A constant with a language tag may not explicitly specify a data type."); + + if (languageTag != null) { return Expressions.makeLanguageStringConstant(lexicalForm, languageTag); } else { String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); @@ -127,21 +136,9 @@ public Constant parseConstant(String lexicalForm, String languageTag, String dat */ public ParserConfiguration registerDatatype(String name, DatatypeConstantHandler handler) throws IllegalArgumentException { - if (datatypes.containsKey(name)) { - throw new IllegalArgumentException("Data type \"" + name + "\" is already registered."); - } + Validate.isTrue(!dataSources.containsKey(name), "The Data type \"%s\" is already registered.", name); this.datatypes.put(name, handler); return this; } - - /** - * The registered data sources. - */ - private HashMap dataSources = new HashMap<>(); - - /** - * The registered datatypes. 
- */ - private HashMap datatypes = new HashMap<>(); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 5d942435a..4c95f04b5 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -48,6 +48,8 @@ */ public class RuleParser { + private static final String DEFAULT_STRING_ENCODING = "UTF-8"; + private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, @@ -60,13 +62,13 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { - parseInto(knowledgeBase, stream, "UTF-8", parserConfiguration); + parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING, parserConfiguration); } public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parseInto(knowledgeBase, inputStream, "UTF-8", parserConfiguration); + parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING, parserConfiguration); } public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding) @@ -77,12 +79,12 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea } public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream) throws ParsingException { - parseInto(knowledgeBase, stream, "UTF-8"); + parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING); } public static void parseInto(final KnowledgeBase knowledgeBase, final String input) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parseInto(knowledgeBase, inputStream, "UTF-8"); + parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING); } public static KnowledgeBase parse(final InputStream stream, final String encoding, @@ -94,13 +96,13 @@ public static KnowledgeBase parse(final InputStream stream, final String encodin public static KnowledgeBase parse(final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { - return parse(stream, "UTF-8", parserConfiguration); + return parse(stream, DEFAULT_STRING_ENCODING, parserConfiguration); } public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - return parse(inputStream, "UTF-8", parserConfiguration); + return parse(inputStream, DEFAULT_STRING_ENCODING, parserConfiguration); } public static KnowledgeBase parse(final InputStream stream, final String encoding) throws ParsingException { @@ -108,12 +110,12 @@ public static KnowledgeBase parse(final InputStream stream, final String encodin } public static KnowledgeBase parse(final InputStream stream) throws ParsingException { - return parse(stream, "UTF-8"); + return parse(stream, DEFAULT_STRING_ENCODING); } public static KnowledgeBase parse(final String input) throws ParsingException { final InputStream inputStream = new 
ByteArrayInputStream(input.getBytes()); - return parse(inputStream, "UTF-8"); + return parse(inputStream, DEFAULT_STRING_ENCODING); } /** @@ -141,20 +143,21 @@ T parse(final JavaCCParser parser) static T parseSyntaxFragment(final String input, SyntaxFragmentParser parserAction, final String syntaxFragmentType, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); + final JavaCCParser localParser = new JavaCCParser(inputStream, DEFAULT_STRING_ENCODING); if (parserConfiguration != null) { localParser.setParserConfiguration(parserConfiguration); } + T result; try { - T result = parserAction.parse(localParser); + result = parserAction.parse(localParser); localParser.ensureEndOfInput(); - return result; } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { LOGGER.error("Exception while parsing " + syntaxFragmentType + ": {}!", input); throw new ParsingException("Exception while parsing " + syntaxFragmentType, e); } + return result; } public static Rule parseRule(final String input, final ParserConfiguration parserConfiguration) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java index dc640dff5..cf585e7e6 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -39,7 +39,7 @@ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclaratio @Override public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 1); + DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); String fileName = arguments.get(0); try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java index 29714b972..475f66d59 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -39,7 +39,7 @@ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclaratio @Override public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 1); + DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); String fileName = arguments.get(0); try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 1faff9341..ebfacabf3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -45,23 +45,22 @@ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSource @Override public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 3); + DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 3); String endpoint = arguments.get(0); + URL endpointUrl; try { JavaCCParser parser = subParserFactory.makeSubParser(endpoint); - endpoint = parser.IRI(false); + endpointUrl = new URL(parser.IRI(false)); } catch (ParseException | PrefixDeclarationException e) { throw new ParsingException(e); + } catch (MalformedURLException e) { + throw new ParsingException("SPARQL endpoint \"" + endpoint + "\" is not a valid URL: " + e.getMessage(), e); } String variables = arguments.get(1); String query = arguments.get(2); - try { - return new SparqlQueryResultDataSource(new URL(endpoint), variables, query); - } catch (MalformedURLException e) { - throw new ParsingException("SPARQL endpoint \"" + endpoint + "\" is not a valid URL: " + e.getMessage(), e); - } + return new SparqlQueryResultDataSource(endpointUrl, variables, query); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 7abc4ad4f..b254665a0 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -21,22 +21,19 @@ */ import java.util.HashSet; -import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.core.model.api.Predicate; /** * Basic methods used in the JavaCC-generated parser. @@ -53,23 +50,23 @@ * */ public class JavaCCParserBase { - PrefixDeclarations prefixDeclarations; + protected PrefixDeclarations prefixDeclarations; - KnowledgeBase knowledgeBase; - ParserConfiguration parserConfiguration; + protected KnowledgeBase knowledgeBase; + protected ParserConfiguration parserConfiguration; /** * "Local" variable to remember (universal) body variables during parsing. */ - final HashSet bodyVars = new HashSet(); + protected final HashSet bodyVars = new HashSet(); /** * "Local" variable to remember existential head variables during parsing. 
*/ - final HashSet headExiVars = new HashSet();; + protected final HashSet headExiVars = new HashSet();; /** * "Local" variable to remember universal head variables during parsing. */ - final HashSet headUniVars = new HashSet();; + protected final HashSet headUniVars = new HashSet();; /** * Defines the context for parsing sub-formulas. @@ -121,7 +118,9 @@ Constant createConstant(String lexicalForm, String languageTag, String datatype) try { return parserConfiguration.parseConstant(lexicalForm, languageTag, datatype); } catch (ParsingException e) { - throw new ParseException(e.getMessage()); + ParseException parseException = new ParseException(e.getMessage()); + parseException.initCause(e); + throw parseException; } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 16011fa0f..b345063b6 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -49,31 +49,35 @@ import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; public class RuleParserDataSourceTest { + private static final String EXAMPLE_RDF_FILE_PATH = "src/main/data/input/example.nt.gz"; + private static final String EXAMPLE_CSV_FILE_PATH = "src/main/data/input/example.csv"; + private static final String WIKIDATA_SPARQL_ENDPOINT_URI = "https://query.wikidata.org/sparql"; + @Test public void testCsvSource() throws ParsingException, IOException { - String input = "@source p[2] : load-csv(\"src/main/data/input/example.csv\") ."; - CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); + String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; + CsvFileDataSource csvds = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input)); } @Test public void testRdfSource() throws ParsingException, IOException { - String input = "@source p[3] : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); + String input = "@source p[3] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; + RdfFileDataSource rdfds = new RdfFileDataSource(new File(EXAMPLE_RDF_FILE_PATH)); assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input)); } @Test(expected = ParsingException.class) public void testRdfSourceInvalidArity() throws ParsingException, IOException { - String input = "@source p[2] : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + String input = "@source p[2] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; RuleParser.parseDataSourceDeclaration(input); } @Test public void testSparqlSource() throws ParsingException, MalformedURLException { - String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + String input = "@source p[2] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + ">,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( - new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); + new URL(WIKIDATA_SPARQL_ENDPOINT_URI), "disease, doid", "?disease wdt:P699 ?doid ."); assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input)); } @@ -109,7 +113,7 @@ public void 
testCustomDataSource() throws ParsingException { public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); Predicate predicate1 = Expressions.makePredicate("p", 3); - SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/sparql"), + SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL(WIKIDATA_SPARQL_ENDPOINT_URI), "var", "?var wdt:P31 wd:Q5 ."); DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); @@ -120,7 +124,7 @@ public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingExcep public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); Predicate predicate1 = Expressions.makePredicate("p", 3); - RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File("src/test/data/input/file.nt")); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File(EXAMPLE_RDF_FILE_PATH)); DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedRdfFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); @@ -131,7 +135,7 @@ public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingExceptio public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); Predicate predicate1 = Expressions.makePredicate("q", 1); - CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File("src/test/data/input/file.csv")); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedCsvFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); From 4beb0baa277fe3f4ba1a223468a0f8129b79c74e Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 17:39:19 +0100 Subject: [PATCH 0438/1003] Parser: Don't exclude non-generated JavaCCParserBase from coverage --- pom.xml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 07c9e2e46..8e831b96f 100644 --- a/pom.xml +++ b/pom.xml @@ -290,14 +290,14 @@ - **/javacc/JavaCCParser* - **/javacc/JavaCCParserConstants* - **/javacc/JavaCCParserTokenManager* - **/javacc/JavaCharStream* - **/javacc/ParseException* - **/javacc/SimpleCharStream* - **/javacc/Token* - **/javacc/TokenMgrError* + **/javacc/JavaCCParser.class + **/javacc/JavaCCParserConstants.class + **/javacc/JavaCCParserTokenManager.class + **/javacc/JavaCharStream.class + **/javacc/ParseException.class + **/javacc/SimpleCharStream.class + **/javacc/Token.class + **/javacc/TokenMgrError.class From a13f7884fb44afdabf08177274d9763ef9ca9eae Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 17:42:16 +0100 Subject: [PATCH 0439/1003] Parser: Return DataSourceDeclaration from parseDataSourceDeclaration --- .../org/semanticweb/vlog4j/parser/RuleParser.java | 8 ++++---- .../syntax/parser/RuleParserDataSourceTest.java | 15 ++++++++------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java 
index 4c95f04b5..50e18558f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -216,13 +216,13 @@ public static Term parseTerm(final String input) throws ParsingException { return parseTerm(input, (ParserConfiguration) null); } - public static DataSource parseDataSourceDeclaration(final String input, ParserConfiguration parserConfiguration) + public static DataSourceDeclaration parseDataSourceDeclaration(final String input, ParserConfiguration parserConfiguration) throws ParsingException { return parseSyntaxFragment(input, RuleParser::parseAndExtractDatasourceDeclaration, "data source declaration", parserConfiguration); } - public static DataSource parseDataSourceDeclaration(final String input) throws ParsingException { + public static DataSourceDeclaration parseDataSourceDeclaration(final String input) throws ParsingException { return parseDataSourceDeclaration(input, null); } @@ -236,7 +236,7 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException return parser.getKnowledgeBase(); } - protected static DataSource parseAndExtractDatasourceDeclaration(final JavaCCParser parser) + protected static DataSourceDeclaration parseAndExtractDatasourceDeclaration(final JavaCCParser parser) throws ParsingException, ParseException, PrefixDeclarationException { parser.source(); @@ -248,7 +248,7 @@ protected static DataSource parseAndExtractDatasourceDeclaration(final JavaCCPar "Unexpected number of data source declarations: " + dataSourceDeclarations.size()); } - return dataSourceDeclarations.get(0).getDataSource(); + return dataSourceDeclarations.get(0); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index b345063b6..90c7074a1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -55,16 +55,16 @@ public class RuleParserDataSourceTest { @Test public void testCsvSource() throws ParsingException, IOException { - String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; + String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; CsvFileDataSource csvds = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); - assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input)); + assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @Test public void testRdfSource() throws ParsingException, IOException { String input = "@source p[3] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; RdfFileDataSource rdfds = new RdfFileDataSource(new File(EXAMPLE_RDF_FILE_PATH)); - assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input)); + assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @Test(expected = ParsingException.class) @@ -75,10 +75,11 @@ public void testRdfSourceInvalidArity() throws ParsingException, IOException { @Test public void testSparqlSource() throws ParsingException, MalformedURLException { - String input = "@source p[2] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + ">,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( - new URL(WIKIDATA_SPARQL_ENDPOINT_URI), 
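With this change, RuleParser.parseDataSourceDeclaration returns the whole DataSourceDeclaration instead of just its DataSource, so callers that only need the source call getDataSource() on the result. A minimal usage sketch against the API as it stands after this patch; the file name is illustrative:

```
import java.io.File;

import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration;
import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource;
import org.semanticweb.vlog4j.parser.RuleParser;

public class ParseDataSourceDeclarationSketch {
    public static void main(String[] args) throws Exception {
        // The parser now hands back the complete declaration (predicate plus source).
        DataSourceDeclaration declaration = RuleParser
                .parseDataSourceDeclaration("@source p[2] : load-csv(\"example.csv\") .");
        System.out.println(declaration);

        // The underlying data source is still available and compares equal to one
        // constructed programmatically from the same path.
        CsvFileDataSource csv = new CsvFileDataSource(new File("example.csv"));
        System.out.println(csv.equals(declaration.getDataSource()));
    }
}
```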
"disease, doid", "?disease wdt:P699 ?doid ."); - assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input)); + String input = "@source p[2] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + + ">,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource(new URL(WIKIDATA_SPARQL_ENDPOINT_URI), + "disease, doid", "?disease wdt:P699 ?doid ."); + assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @Test(expected = ParsingException.class) From 81cfb25bdcd91aaf189c1954515687014a02eb29 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 17:43:11 +0100 Subject: [PATCH 0440/1003] Parser: Always keep cause when converting to/from ParseExceptions --- .../vlog4j/parser/javacc/JavaCCParser.jj | 6 ++--- .../parser/javacc/JavaCCParserBase.java | 25 +++++++++++++++---- 2 files changed, 22 insertions(+), 9 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 89f542b82..cd1f0c045 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -108,8 +108,6 @@ void source() throws PrefixDeclarationException: int nArity; nArity = Integer.parseInt(arity.image); // Do not catch NumberFormatException: < INTEGER > matches must parse as int in Java! - if ( dataSource instanceof RdfFileDataSource && nArity != 3 ) - throw new ParseException("Cannot load RDF data into predicate of arity " + nArity +"."); addDataSource(predicateName, nArity, dataSource); } @@ -126,7 +124,7 @@ DataSource dataSource() throws PrefixDeclarationException: try { return parserConfiguration.parseDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); } catch (ParsingException e) { - throw new ParseException(e.getMessage()); + throw makeParseExceptionWithCause(e); } } } @@ -222,7 +220,7 @@ Fact fact(FormulaContext context) throws PrefixDeclarationException: try { return Expressions.makeFact(predicateName, terms); } catch (IllegalArgumentException e) { - throw new ParseException("Error parsing fact: " + e.toString()); + throw makeParseExceptionWithCause("Error parsing fact: " + e.getMessage(), e); } } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index b254665a0..08d9ab261 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -97,7 +97,7 @@ Constant createConstant(String lexicalForm) throws ParseException { try { return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(lexicalForm)); } catch (PrefixDeclarationException e) { - throw new ParseException(e.getMessage()); + throw makeParseExceptionWithCause(e); } } @@ -105,7 +105,6 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti return createConstant(lexicalForm, null, datatype); } - /** * Creates a suitable {@link Constant} from the parsed data. 
* @@ -118,9 +117,7 @@ Constant createConstant(String lexicalForm, String languageTag, String datatype) try { return parserConfiguration.parseConstant(lexicalForm, languageTag, datatype); } catch (ParsingException e) { - ParseException parseException = new ParseException(e.getMessage()); - parseException.initCause(e); - throw parseException; + throw makeParseExceptionWithCause(e); } } @@ -235,6 +232,24 @@ void resetVariableSets() { this.headUniVars.clear(); } + /** + * Convert a throwable into a ParseException. + * + * @param message The error message. + * @param cause The {@link Throwable} that caused this exception. + * + * @return A {@link ParseException} with appropriate cause and message. + */ + protected ParseException makeParseExceptionWithCause(String message, Throwable cause) { + ParseException parseException = new ParseException(message); + parseException.initCause(cause); + return parseException; + } + + protected ParseException makeParseExceptionWithCause(Throwable cause) { + return makeParseExceptionWithCause(cause.getMessage(), cause); + } + public void setKnowledgeBase(KnowledgeBase knowledgeBase) { this.knowledgeBase = knowledgeBase; } From 97b35f5c6bfc5ca68fc70afcdef219765d6fa98a Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 17:44:11 +0100 Subject: [PATCH 0441/1003] Parser: Fix checking for duplicate datatype handlers --- .../vlog4j/parser/ParserConfiguration.java | 2 +- .../parser/ParserConfigurationTest.java | 73 +++++++++++++++++++ 2 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 4d75fae0a..695decb21 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -136,7 +136,7 @@ public Constant parseConstant(String lexicalForm, String languageTag, String dat */ public ParserConfiguration registerDatatype(String name, DatatypeConstantHandler handler) throws IllegalArgumentException { - Validate.isTrue(!dataSources.containsKey(name), "The Data type \"%s\" is already registered.", name); + Validate.isTrue(!datatypes.containsKey(name), "The Data type \"%s\" is already registered.", name); this.datatypes.put(name, handler); return this; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java new file mode 100644 index 000000000..d509fe7f4 --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java @@ -0,0 +1,73 @@ +package org.semanticweb.vlog4j.syntax.parser; + +/*- + * #%L + * VLog4j Syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
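The makeParseExceptionWithCause helpers exist because the JavaCC-generated ParseException only offers a message constructor, so without initCause the original ParsingException would vanish from stack traces. A minimal sketch of the same pattern with a stand-in exception type, so it runs independently of the generated parser classes:

```
public class CausePreservingWrapSketch {
    // Stand-in for a generated exception type that has no (String, Throwable) constructor.
    static class MessageOnlyException extends Exception {
        MessageOnlyException(String message) {
            super(message);
        }
    }

    // Same pattern as makeParseExceptionWithCause: create first, then attach the cause.
    static MessageOnlyException wrap(String message, Throwable cause) {
        MessageOnlyException wrapped = new MessageOnlyException(message);
        wrapped.initCause(cause);
        return wrapped;
    }

    public static void main(String[] args) {
        Exception original = new IllegalArgumentException("bad lexical form");
        MessageOnlyException wrapped = wrap(original.getMessage(), original);
        // The original exception stays reachable for diagnostics.
        System.out.println(wrapped.getCause() == original);
    }
}
```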
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.mockito.Mockito.*; + +import org.junit.Test; +import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; + +public class ParserConfigurationTest { + private static final String TYPE_NAME = "test-type"; + private static final String SOURCE_NAME = "test-source"; + + private final DatatypeConstantHandler datatypeConstantHandler = mock(DatatypeConstantHandler.class); + private final DataSourceDeclarationHandler dataSourceDeclarationHandler = mock(DataSourceDeclarationHandler.class); + + @Test(expected = IllegalArgumentException.class) + public void registerDataSource_duplicateName_throws() { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + + parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler) + .registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler); + } + + @Test(expected = IllegalArgumentException.class) + public void registerDatatype_duplicateName_throws() { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDatatype(TYPE_NAME, datatypeConstantHandler).registerDatatype(TYPE_NAME, + datatypeConstantHandler); + } + + @Test + public void registerDataSource_datatypeName_succeeds() { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDatatype(TYPE_NAME, datatypeConstantHandler).registerDataSource(TYPE_NAME, + dataSourceDeclarationHandler); + } + + @Test + public void registerDatatype_dataSourceName_succeeds() { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler).registerDatatype(SOURCE_NAME, + datatypeConstantHandler); + } + + @Test(expected = IllegalArgumentException.class) + public void parseConstant_languageTagWithExplictDatatype_throws() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.parseConstant("test", "test", "test"); + } + +} From db02e1b4f39acdb3093932d9d1a3b9c3192b3406 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 17:58:16 +0100 Subject: [PATCH 0442/1003] Parser: Expand javadoc w.r.t. DATASOURCE/DataSource type --- .../vlog4j/parser/DataSourceDeclarationHandler.java | 5 ++++- .../vlog4j/parser/ParserConfiguration.java | 12 +++++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index c191ffa76..ff2907ca9 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -35,13 +35,16 @@ public interface DataSourceDeclarationHandler { /** * Parse a Data Source Declaration. * + * This is called by the parser to instantiate the {@link DataSource} + * component of a {@link DataSourceDeclaration}. + * * @param arguments Arguments given to the Data Source declaration. 
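The corrected check consults the datatype map, so registering the same datatype name twice now fails, while a datatype and a data source may still share a name. A small sketch of the behaviour the new test pins down, using a Mockito mock for the handler just as the test does; the datatype IRI is made up:

```
import static org.mockito.Mockito.mock;

import org.semanticweb.vlog4j.parser.DatatypeConstantHandler;
import org.semanticweb.vlog4j.parser.ParserConfiguration;

public class DuplicateDatatypeRegistrationSketch {
    public static void main(String[] args) {
        DatatypeConstantHandler handler = mock(DatatypeConstantHandler.class);
        ParserConfiguration configuration = new ParserConfiguration();

        configuration.registerDatatype("https://example.org/my-type", handler);
        try {
            // Before the fix this was checked against the data-source map and
            // silently accepted; now it is rejected.
            configuration.registerDatatype("https://example.org/my-type", handler);
        } catch (IllegalArgumentException e) {
            System.out.println("rejected duplicate datatype: " + e.getMessage());
        }
    }
}
```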
* @param subParserFactory a factory for obtaining a SubParser, sharing the * parser's state, but bound to new input. * * @throws ParsingException when the given arity or arguments are invalid for * the Data Source. - * @return a @{link DataSource} instance corresponding to the given arguments. + * @return a {@link DataSource} instance corresponding to the given arguments. */ public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 695decb21..c1639a430 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -47,7 +47,14 @@ public class ParserConfiguration { private HashMap datatypes = new HashMap<>(); /** - * Register a new Data Source. + * Register a new (type of) Data Source. + * + * This registers a handler for some custom value of the {@code DATASOURCE} + * production of the rules grammar, corresponding to some {@link DataSource} + * type. + * + * @see <"https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar"> for the + * grammar. * * @param name Name of the data source, as it appears in the declaring * directive. @@ -67,6 +74,9 @@ public ParserConfiguration registerDataSource(String name, DataSourceDeclaration /** * Parse a Data Source declaration. * + * This is called by the parser to construct a {@link DataSourceDeclaration}. It + * is responsible for instantiating an appropriate {@link DataSource} type. + * * @param name Name of the data source. * @param args arguments given in the data source declaration. * @param subParserFactory a {@link SubParserFactory} instance that creates From f21c7cbce904a39801f174eeafb31605c62c3513 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 18:07:39 +0100 Subject: [PATCH 0443/1003] Parser: Verify arity matches number of variables in SPARQL sources --- .../SparqlQueryResultDataSource.java | 15 ++++++++++----- .../syntax/parser/RuleParserDataSourceTest.java | 8 +++++--- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index 8eb8168b6..cadeeae79 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
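The expanded javadoc describes the extension point: registerDataSource maps a directive name to a DataSourceDeclarationHandler, whose handleDeclaration turns the directive's arguments into a DataSource. A sketch of a custom registration, assuming the handler receives its arguments as a list of strings (as the built-in handlers do); the directive name load-table is made up, and the handler simply reuses the existing CSV source:

```
import java.io.File;
import java.io.IOException;
import java.util.List;

import org.semanticweb.vlog4j.core.model.api.DataSource;
import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource;
import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler;
import org.semanticweb.vlog4j.parser.ParserConfiguration;
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.RuleParser;
import org.semanticweb.vlog4j.parser.javacc.SubParserFactory;

public class CustomDataSourceSketch {

    // Hypothetical handler for a "load-table" directive that delegates to the CSV source.
    static class LoadTableHandler implements DataSourceDeclarationHandler {
        @Override
        public DataSource handleDeclaration(List<String> arguments, SubParserFactory subParserFactory)
                throws ParsingException {
            try {
                return new CsvFileDataSource(new File(arguments.get(0)));
            } catch (IOException e) {
                throw new ParsingException("Could not use source file \"" + arguments.get(0) + "\"", e);
            }
        }
    }

    public static void main(String[] args) throws ParsingException {
        ParserConfiguration configuration = new ParserConfiguration().registerDataSource("load-table",
                new LoadTableHandler());

        // The custom directive can now be used in @source declarations.
        System.out.println(RuleParser
                .parseDataSourceDeclaration("@source p[2] : load-table(\"example.csv\") .", configuration)
                .getDataSource());
    }
}
```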
@@ -23,6 +23,7 @@ import java.net.URL; import java.util.Iterator; import java.util.LinkedHashSet; +import java.util.Optional; import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; @@ -32,7 +33,7 @@ /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a * given web endpoint. - * + * * @author Irina Dragoste * */ @@ -46,7 +47,7 @@ public class SparqlQueryResultDataSource extends VLogDataSource { /** * Creates a data source from answers to a remote SPARQL query. - * + * * @param endpoint web location of the resource the query will be * evaluated on * @param queryVariables comma-separated list of SPARQL variable names (without @@ -69,7 +70,7 @@ public SparqlQueryResultDataSource(final URL endpoint, final String queryVariabl /** * Creates a data source from answers to a remote SPARQL query. - * + * * @param endpoint the web location of the resource the query will be * evaluated on. * @param queryVariables the variables of the query, in the given order. The @@ -135,6 +136,10 @@ static String getQueryVariablesList(LinkedHashSet queryVariables) { return sb.toString(); } + public Optional getRequiredArity() { + return Optional.of(this.queryVariables.split(",").length); + } + @Override public int hashCode() { final int prime = 31; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 90c7074a1..509d752e2 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -83,8 +83,10 @@ public void testSparqlSource() throws ParsingException, MalformedURLException { } @Test(expected = ParsingException.class) - public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { - String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + public void parseDataSourceDeclaration_sparqlSourceInvalidArity_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + + ">,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; RuleParser.parseDataSourceDeclaration(input); } @@ -113,7 +115,7 @@ public void testCustomDataSource() throws ParsingException { @Test public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - Predicate predicate1 = Expressions.makePredicate("p", 3); + Predicate predicate1 = Expressions.makePredicate("p", 1); SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL(WIKIDATA_SPARQL_ENDPOINT_URI), "var", "?var wdt:P31 wd:Q5 ."); DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); From 6eed41dcd5713baa8aa286c31cf919b04ce5dad4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 18:08:53 +0100 Subject: [PATCH 0444/1003] Parser: Add more test cases for invalid inputs --- .../parser/RuleParserDataSourceTest.java | 44 +++++++++++++++++++ .../vlog4j/syntax/parser/RuleParserTest.java | 12 +++++ 2 files changed, 56 insertions(+) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 
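After this change a SPARQL source derives a required arity from its comma-separated variable list, and the parser rejects @source declarations whose declared arity differs, which is what the updated test checks. A small sketch of the check, assuming getRequiredArity() returns an Optional of Integer as the parser's arity validation expects:

```
import java.net.URL;
import java.util.Optional;

import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource;

public class RequiredAritySketch {
    public static void main(String[] args) throws Exception {
        SparqlQueryResultDataSource source = new SparqlQueryResultDataSource(
                new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid .");

        // Two query variables ("disease" and "doid"), so the declared predicate must have arity 2.
        Optional<Integer> requiredArity = source.getRequiredArity();
        System.out.println(requiredArity); // Optional[2]

        int declaredArity = 3;
        if (requiredArity.isPresent() && requiredArity.get() != declaredArity) {
            // The parser raises an exception at this point; the sketch just reports the mismatch.
            System.out.println("Invalid arity " + declaredArity + " for data source, expected "
                    + requiredArity.get() + ".");
        }
    }
}
```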
509d752e2..f569175f1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -90,6 +90,45 @@ public void parseDataSourceDeclaration_sparqlSourceInvalidArity_throws() RuleParser.parseDataSourceDeclaration(input); } + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_sparqlSourceMalformedUri_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(\"\",\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_sparqlSourceUnknownPrefix_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(\"wdqs:sparql\",\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_sparqlSourceUnparseableUrl_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(\"wdqs:\",\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { + String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_csvSourceInvalidPath_throws() throws ParsingException { + String input = "@source p[1] : load-csv(\"\0.csv\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_rdfSourceInvalidPath_throws() throws ParsingException { + String input = "@source p[3] : load-rdf(\"\0.nt\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + @Test(expected = ParsingException.class) public void testUnknownDataSource() throws ParsingException { String input = "@source p[2] : unknown-data-source(\"hello, world\") ."; @@ -144,4 +183,9 @@ public void csvDataSourceDeclarationToStringParsingTest() throws ParsingExceptio RuleParser.parseInto(kb, dataSourceDeclaration.toString()); assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); } + + @Test(expected = ParsingException.class) + public void sparqlDataSourceDeclaration_invalidNumberOfArguments_throws() throws ParsingException { + RuleParser.parseDataSourceDeclaration("@source p[1] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + ">) ."); + } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 58bab4a65..11d685491 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -254,6 +254,18 @@ public void testStringLiteralAllEscapes() throws ParsingException { assertEquals(fact, RuleParser.parseLiteral(input)); } + @Test(expected = ParsingException.class) + public void parseLiteral_invalidEscapeSequence_throws() throws ParsingException { + String input = "p(\"\\ÿ\")"; + RuleParser.parseLiteral(input); + 
} + + @Test(expected = ParsingException.class) + public void parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingException { + String input = "p(\"\\\")"; + RuleParser.parseLiteral(input); + } + @Test public void testStringLiteralMultiLine() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") From cbc896379ca7b784d3185dbe723e7794502149fd Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 18:41:44 +0100 Subject: [PATCH 0445/1003] Parser: Require Entity in RuleParser#parseSyntaxFragment --- .../main/java/org/semanticweb/vlog4j/parser/RuleParser.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 50e18558f..3f0a5aa71 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -27,6 +27,7 @@ import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Entity; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -124,7 +125,7 @@ public static KnowledgeBase parse(final String input) throws ParsingException { * This is needed to specify the exceptions thrown by the parse method. */ @FunctionalInterface - interface SyntaxFragmentParser { + interface SyntaxFragmentParser { T parse(final JavaCCParser parser) throws ParsingException, ParseException, PrefixDeclarationException, TokenMgrError; } @@ -140,7 +141,7 @@ T parse(final JavaCCParser parser) * @throws ParsingException when an error during parsing occurs. * @return an appropriate instance of {@code T} */ - static T parseSyntaxFragment(final String input, SyntaxFragmentParser parserAction, + static T parseSyntaxFragment(final String input, SyntaxFragmentParser parserAction, final String syntaxFragmentType, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); final JavaCCParser localParser = new JavaCCParser(inputStream, DEFAULT_STRING_ENCODING); From d7e1701fc788c688a8662cb77509d62a8c1a5415 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 18:44:17 +0100 Subject: [PATCH 0446/1003] Parser: Rename parserConfiguration#parseDataSourceDeclaration --- .../org/semanticweb/vlog4j/parser/ParserConfiguration.java | 4 ++-- .../java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index c1639a430..c0553fff9 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -72,7 +72,7 @@ public ParserConfiguration registerDataSource(String name, DataSourceDeclaration } /** - * Parse a Data Source declaration. + * Parse the source-specific part of a Data Source declaration. 
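These tests pin down which escape sequences a double-quoted string constant accepts. A short usage sketch showing one literal that parses and one that is rejected:

```
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.RuleParser;

public class ParseLiteralSketch {
    public static void main(String[] args) throws ParsingException {
        // Valid: \n and \t are recognised escape sequences inside a string constant.
        System.out.println(RuleParser.parseLiteral("p(\"line1\\nline2\\tend\")"));

        try {
            // Invalid: "\ÿ" is not a recognised escape sequence, so parsing fails.
            RuleParser.parseLiteral("p(\"\\ÿ\")");
        } catch (ParsingException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
```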
* * This is called by the parser to construct a {@link DataSourceDeclaration}. It * is responsible for instantiating an appropriate {@link DataSource} type. @@ -87,7 +87,7 @@ public ParserConfiguration registerDataSource(String name, DataSourceDeclaration * * @return the Data Source instance. */ - public DataSource parseDataSourceDeclaration(String name, List args, + public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String name, List args, final SubParserFactory subParserFactory) throws ParsingException { DataSourceDeclarationHandler handler = dataSources.get(name); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index cd1f0c045..7fe8d5a24 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -122,7 +122,7 @@ DataSource dataSource() throws PrefixDeclarationException: (sourceName = < DIRECTIVENAME > | sourceName = < VARORPREDNAME >) < LPAREN > arguments = Arguments() < RPAREN > { try { - return parserConfiguration.parseDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); + return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); } catch (ParsingException e) { throw makeParseExceptionWithCause(e); } From 0f8d5400403a54b94dc4d416ff1f2bdeccb01b35 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 19:14:17 +0100 Subject: [PATCH 0447/1003] Parser: Address various review comments --- vlog4j-parser/pom.xml | 6 +++--- .../vlog4j/parser/ParserConfiguration.java | 16 ++++++++++------ .../semanticweb/vlog4j/parser/RuleParser.java | 2 +- .../CsvFileDataSourceDeclarationHandler.java | 3 ++- .../RdfFileDataSourceDeclarationHandler.java | 3 ++- ...QueryResultDataSourceDeclarationHandler.java | 16 +++++++++------- .../vlog4j/parser/javacc/JavaCCParserBase.java | 13 ++++++++----- .../vlog4j/parser/javacc/SubParserFactory.java | 17 +++++++++-------- .../vlog4j/syntax/parser/RuleParserTest.java | 2 +- 9 files changed, 45 insertions(+), 33 deletions(-) diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index f05d6ef5a..ce8616874 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -47,9 +47,9 @@ - org.codehaus.mojo - build-helper-maven-plugin - 1.8 + org.codehaus.mojo + build-helper-maven-plugin + 1.8 generate-sources diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index c0553fff9..34bf895e3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -121,15 +121,19 @@ public Constant parseConstant(String lexicalForm, String languageTag, String dat if (languageTag != null) { return Expressions.makeLanguageStringConstant(lexicalForm, languageTag); } else { - String type = ((datatype != null) ? 
datatype : PrefixDeclarations.XSD_STRING); - DatatypeConstantHandler handler = datatypes.get(type); + return parseDatatypeConstant(lexicalForm, datatype); + } + } - if (handler != null) { - return handler.createConstant(lexicalForm); - } + private Constant parseDatatypeConstant(String lexicalForm, String datatype) throws ParsingException { + String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); + DatatypeConstantHandler handler = datatypes.get(type); - return Expressions.makeDatatypeConstant(lexicalForm, type); + if (handler != null) { + return handler.createConstant(lexicalForm); } + + return Expressions.makeDatatypeConstant(lexicalForm, type); } /** diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 3f0a5aa71..431a01d0f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -49,7 +49,7 @@ */ public class RuleParser { - private static final String DEFAULT_STRING_ENCODING = "UTF-8"; + public static final String DEFAULT_STRING_ENCODING = "UTF-8"; private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java index cf585e7e6..8b7db9640 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -41,9 +41,10 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); String fileName = arguments.get(0); + File file = new File(fileName); try { - return new CsvFileDataSource(new File(fileName)); + return new CsvFileDataSource(file); } catch (IOException e) { throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java index 475f66d59..a17145e19 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -41,9 +41,10 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); String fileName = arguments.get(0); + File file = new File(fileName); try { - return new RdfFileDataSource(new File(fileName)); + return new RdfFileDataSource(file); } catch (IOException e) { throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index ebfacabf3..71af97bdf 
100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -2,7 +2,7 @@ /*- * #%L - * vlog4j-parser + * VLog4j Parser * %% * Copyright (C) 2018 - 2019 VLog4j Developers * %% @@ -20,15 +20,12 @@ * #L% */ -import java.io.File; -import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.ParsingException; @@ -48,12 +45,17 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 3); String endpoint = arguments.get(0); - URL endpointUrl; + JavaCCParser parser = subParserFactory.makeSubParser(endpoint); + String parsedEndpoint; try { - JavaCCParser parser = subParserFactory.makeSubParser(endpoint); - endpointUrl = new URL(parser.IRI(false)); + parsedEndpoint = parser.IRI(false); } catch (ParseException | PrefixDeclarationException e) { throw new ParsingException(e); + } + + URL endpointUrl; + try { + endpointUrl = new URL(parsedEndpoint); } catch (MalformedURLException e) { throw new ParsingException("SPARQL endpoint \"" + endpoint + "\" is not a valid URL: " + e.getMessage(), e); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 08d9ab261..7a2fbede1 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -23,6 +23,7 @@ import java.util.HashSet; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -62,11 +63,11 @@ public class JavaCCParserBase { /** * "Local" variable to remember existential head variables during parsing. */ - protected final HashSet headExiVars = new HashSet();; + protected final HashSet headExiVars = new HashSet(); /** * "Local" variable to remember universal head variables during parsing. */ - protected final HashSet headUniVars = new HashSet();; + protected final HashSet headUniVars = new HashSet(); /** * Defines the context for parsing sub-formulas. 
@@ -93,12 +94,14 @@ public JavaCCParserBase() { this.parserConfiguration = new DefaultParserConfiguration(); } - Constant createConstant(String lexicalForm) throws ParseException { + AbstractConstant createConstant(String lexicalForm) throws ParseException { + String absoluteIri; try { - return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(lexicalForm)); + absoluteIri = prefixDeclarations.absolutize(lexicalForm); } catch (PrefixDeclarationException e) { throw makeParseExceptionWithCause(e); } + return Expressions.makeAbstractConstant(absoluteIri); } Constant createConstant(String lexicalForm, String datatype) throws ParseException { @@ -124,7 +127,7 @@ Constant createConstant(String lexicalForm, String languageTag, String datatype) void addDataSource(String predicateName, int arity, DataSource dataSource) throws ParseException { if (dataSource.getRequiredArity().isPresent()) { Integer requiredArity = dataSource.getRequiredArity().get(); - if (requiredArity != arity) { + if (arity != requiredArity) { throw new ParseException( "Invalid arity " + arity + " for data source, " + "expected " + requiredArity + "."); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index ddea292b8..1b9deccaa 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -26,6 +26,7 @@ import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.RuleParser; /** * Factory for creating a SubParser sharing configuration, state, and @@ -35,6 +36,10 @@ * @author Maximilian Marx */ public class SubParserFactory { + private KnowledgeBase knowledgeBase; + private ParserConfiguration parserConfiguration; + private PrefixDeclarations prefixDeclarations; + /** * Construct a SubParserFactory. 
* @@ -57,22 +62,18 @@ public class SubParserFactory { */ public JavaCCParser makeSubParser(final InputStream inputStream, final String encoding) { JavaCCParser subParser = new JavaCCParser(inputStream, encoding); - subParser.setKnowledgeBase(knowledgeBase); - subParser.setPrefixDeclarations(prefixDeclarations); - subParser.setParserConfiguration(parserConfiguration); + subParser.setKnowledgeBase(this.knowledgeBase); + subParser.setPrefixDeclarations(this.prefixDeclarations); + subParser.setParserConfiguration(this.parserConfiguration); return subParser; } public JavaCCParser makeSubParser(final InputStream inputStream) { - return makeSubParser(inputStream, "UTF-8"); + return makeSubParser(inputStream, RuleParser.DEFAULT_STRING_ENCODING); } public JavaCCParser makeSubParser(final String string) { return makeSubParser(new ByteArrayInputStream(string.getBytes())); } - - private KnowledgeBase knowledgeBase; - private ParserConfiguration parserConfiguration; - private PrefixDeclarations prefixDeclarations; } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 11d685491..29768f5b1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -367,7 +367,7 @@ public void testBlankPredicateName() throws ParsingException { } @Test(expected = ParsingException.class) - public void testInvalidDatatypeOnLiteral() throws ParsingException { + public void parseLiteral_invalidLiteralString_throws() throws ParsingException { final String input = "P(\"a\")^^whatever"; RuleParser.parseLiteral(input); } From f860a9cbde63a98e9fb294e3561fc127ffc3d160 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 19:44:36 +0100 Subject: [PATCH 0448/1003] Parser: Re-use constants for default data source names --- .../vlog4j/core/model/implementation/Serializer.java | 4 ++-- .../vlog4j/parser/DefaultParserConfiguration.java | 8 +++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 7726b506f..75dac206a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -63,8 +63,8 @@ public final class Serializer { public static final String AT = "@"; public static final String DATA_SOURCE = "@source "; public static final String CSV_FILE_DATA_SOURCE = "load-csv"; - private static final String RDF_FILE_DATA_SOURCE = "load-rdf"; - private static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; + public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; + public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; public static final String DATA_SOURCE_SEPARATOR = ": "; public static final String COLON = ":"; public static final String DOUBLE_CARET = "^^"; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java index 168a738bb..a52f02b87 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java +++ 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java @@ -20,6 +20,7 @@ * #L% */ +import org.semanticweb.vlog4j.core.model.implementation.Serializer; import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; @@ -39,8 +40,9 @@ public DefaultParserConfiguration() { * Register built-in data sources (currently CSV, RDF, SPARQL). */ private void registerDefaultDataSources() { - registerDataSource("load-csv", new CsvFileDataSourceDeclarationHandler()); - registerDataSource("load-rdf", new RdfFileDataSourceDeclarationHandler()); - registerDataSource("sparql", new SparqlQueryResultDataSourceDeclarationHandler()); + registerDataSource(Serializer.CSV_FILE_DATA_SOURCE, new CsvFileDataSourceDeclarationHandler()); + registerDataSource(Serializer.RDF_FILE_DATA_SOURCE, new RdfFileDataSourceDeclarationHandler()); + registerDataSource(Serializer.SPARQL_QUERY_RESULT_DATA_SOURCE, + new SparqlQueryResultDataSourceDeclarationHandler()); } } From bb0183905f9a58ab9773b4ba52517510878bd6ff Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 19:58:19 +0100 Subject: [PATCH 0449/1003] Core: Properly escape file paths when serialising data sources --- .../vlog4j/core/model/implementation/Serializer.java | 2 +- .../vlog4j/syntax/parser/RuleParserDataSourceTest.java | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 75dac206a..164a26f97 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -322,7 +322,7 @@ public static String getString(final SparqlQueryResultDataSource dataSource) { } private static String getFileString(final FileDataSource fileDataSource) { - return addQuotes(fileDataSource.getFile().toString()); + return addQuotes(escape(fileDataSource.getFile().toString())); } private static String getIRIString(final String string) { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index f569175f1..5a357f407 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -188,4 +188,9 @@ public void csvDataSourceDeclarationToStringParsingTest() throws ParsingExceptio public void sparqlDataSourceDeclaration_invalidNumberOfArguments_throws() throws ParsingException { RuleParser.parseDataSourceDeclaration("@source p[1] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + ">) ."); } + + @Test + public void parseDataSourceDeclaration_windowsStylePathName_success() throws ParsingException, IOException { + RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); + } } From 8d50ad95ac118144c645c59d4bea695af607408f Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 20:20:22 +0100 Subject: [PATCH 0450/1003] Core: Handle all(*) escape sequences in 
Serializer for XSD_STRING (*) except for single quotes, which should be left alone, since we are serialising to double-quoted strings. Fixes #144. --- .../core/model/implementation/Serializer.java | 12 +++++++-- .../vlog4j/syntax/parser/RuleParserTest.java | 27 ++++++++++++++++--- 2 files changed, 34 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 164a26f97..ad91d9aa8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -191,7 +191,7 @@ public static String getConstantName(final LanguageStringConstant languageString */ public static String getString(final DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return addQuotes(datatypeConstant.getLexicalValue()); + return addQuotes(escape(datatypeConstant.getLexicalValue())); } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) @@ -335,7 +335,15 @@ private static String getIRIString(final String string) { } private static String escape(final String string) { - return string.replace("\\", "\\\\").replace("\"", "\\\""); + return string + .replace("\\", "\\\\") + .replace("\"", "\\\"") + .replace("\t", "\\t") + .replace("\b", "\\b") + .replace("\n", "\\n") + .replace("\r", "\\r") + .replace("\f", "\\f"); + // don't touch single quotes here since we only construct double-quoted strings } private static String addQuotes(final String string) { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 29768f5b1..d5bcf7753 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -238,7 +238,7 @@ public void testIncompleteStringLiteral() throws ParsingException { } @Test - public void testStringLiteralEscapes() throws ParsingException { + public void parseLiteral_escapeSequences_success() throws ParsingException { String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); @@ -246,7 +246,14 @@ public void testStringLiteralEscapes() throws ParsingException { } @Test - public void testStringLiteralAllEscapes() throws ParsingException { + public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { + PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); + assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + } + + @Test + public void parseLiteral_allEscapeSequences_success() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; PositiveLiteral fact = Expressions.makePositiveLiteral("p", @@ -254,6 +261,13 @@ public void testStringLiteralAllEscapes() throws ParsingException { assertEquals(fact, RuleParser.parseLiteral(input)); } + 
@Test + public void parseLiteral_allEscapeSequences_roundTrips() throws ParsingException { + PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarations.XSD_STRING)); + assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + } + @Test(expected = ParsingException.class) public void parseLiteral_invalidEscapeSequence_throws() throws ParsingException { String input = "p(\"\\ÿ\")"; @@ -267,13 +281,20 @@ public void parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingE } @Test - public void testStringLiteralMultiLine() throws ParsingException { + public void parseLiteral_multiLineLiteral_success() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } + @Test + public void parseLiteral_multiLineLiteral_roundTrips() throws ParsingException { + PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); + assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + } + @Test(expected = ParsingException.class) public void testIncompleteStringLiteralMultiLine() throws ParsingException { String input = "p('''abc\ndef'')"; From c4823ab0dc73168b8026ecd08f3fea8c6e88fb71 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 6 Dec 2019 14:42:54 +0100 Subject: [PATCH 0451/1003] Parser: Address review comments --- .../implementation/SparqlQueryResultDataSource.java | 1 + .../vlog4j/parser/DataSourceDeclarationHandler.java | 8 ++++---- .../semanticweb/vlog4j/parser/ParserConfiguration.java | 4 ++-- .../SparqlQueryResultDataSourceDeclarationHandler.java | 2 +- .../org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj | 2 +- .../vlog4j/parser/javacc/JavaCCParserBase.java | 8 ++------ .../semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 2 +- 7 files changed, 12 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index cadeeae79..89db26939 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -136,6 +136,7 @@ static String getQueryVariablesList(LinkedHashSet queryVariables) { return sb.toString(); } + @Override public Optional getRequiredArity() { return Optional.of(this.queryVariables.split(",").length); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index ff2907ca9..19ef07ba2 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -35,15 +35,15 @@ public interface DataSourceDeclarationHandler { /** * Parse a Data Source Declaration. 
* - * This is called by the parser to instantiate the {@link DataSource} - * component of a {@link DataSourceDeclaration}. + * This is called by the parser to instantiate the {@link DataSource} component + * of a {@link DataSourceDeclaration}. * * @param arguments Arguments given to the Data Source declaration. * @param subParserFactory a factory for obtaining a SubParser, sharing the * parser's state, but bound to new input. * - * @throws ParsingException when the given arity or arguments are invalid for - * the Data Source. + * @throws ParsingException when any of the arguments is invalid for the Data + * Source, or the number of arguments is invalid. * @return a {@link DataSource} instance corresponding to the given arguments. */ public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 34bf895e3..a3b16dcba 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -109,8 +109,8 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String name * * @throws ParsingException when the lexical form is invalid for the * given data type. - * @throws IllegalArgumentException when neither {@code languageTag} and - * {@code datatype} are null. + * @throws IllegalArgumentException when both {@code languageTag} and + * {@code datatype} are non-null. * @return the {@link Constant} corresponding to the given arguments. */ public Constant parseConstant(String lexicalForm, String languageTag, String datatype) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 71af97bdf..3524fcb0f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -50,7 +50,7 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto try { parsedEndpoint = parser.IRI(false); } catch (ParseException | PrefixDeclarationException e) { - throw new ParsingException(e); + throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); } URL endpointUrl; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 7fe8d5a24..2a127ba9b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -124,7 +124,7 @@ DataSource dataSource() throws PrefixDeclarationException: try { return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); } catch (ParsingException e) { - throw makeParseExceptionWithCause(e); + throw makeParseExceptionWithCause("Failed while trying to parse the source-specific part of a data source declaration", e); } } } diff --git 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 7a2fbede1..63173e270 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -99,7 +99,7 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { try { absoluteIri = prefixDeclarations.absolutize(lexicalForm); } catch (PrefixDeclarationException e) { - throw makeParseExceptionWithCause(e); + throw makeParseExceptionWithCause("Failed to parse IRI", e); } return Expressions.makeAbstractConstant(absoluteIri); } @@ -120,7 +120,7 @@ Constant createConstant(String lexicalForm, String languageTag, String datatype) try { return parserConfiguration.parseConstant(lexicalForm, languageTag, datatype); } catch (ParsingException e) { - throw makeParseExceptionWithCause(e); + throw makeParseExceptionWithCause("Failed to parse Constant", e); } } @@ -249,10 +249,6 @@ protected ParseException makeParseExceptionWithCause(String message, Throwable c return parseException; } - protected ParseException makeParseExceptionWithCause(Throwable cause) { - return makeParseExceptionWithCause(cause.getMessage(), cause); - } - public void setKnowledgeBase(KnowledgeBase knowledgeBase) { this.knowledgeBase = knowledgeBase; } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index d5bcf7753..971202610 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -411,7 +411,7 @@ public void testIriTypeInDatatypeLiteral() throws ParsingException { @Test public void predicateRelativeNumericIRITest() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("<1.e1>(a)."); + Fact f = RuleParser.parseFact("<1.e1>(a)."); // 1.e1 == "10"^^xsd:double Fact f2 = Expressions.makeFact("1.e1", a); assertEquals(f, f2); } From b846f1d709a43305406218c1fc78d6152f0b8ba4 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 6 Dec 2019 16:30:30 +0100 Subject: [PATCH 0452/1003] update to release version 0.5.0 --- coverage/pom.xml | 14 +++++++------- pom.xml | 2 +- vlog4j-client/pom.xml | 2 +- vlog4j-core/pom.xml | 2 +- vlog4j-examples/pom.xml | 2 +- vlog4j-graal/pom.xml | 2 +- vlog4j-owlapi/pom.xml | 2 +- vlog4j-parser/pom.xml | 2 +- vlog4j-rdf/pom.xml | 2 +- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index e158f1d91..0f2270cb3 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 coverage @@ -16,32 +16,32 @@ org.semanticweb.vlog4j vlog4j-core - 0.5.0-SNAPSHOT + 0.5.0 org.semanticweb.vlog4j vlog4j-rdf - 0.5.0-SNAPSHOT + 0.5.0 org.semanticweb.vlog4j vlog4j-owlapi - 0.5.0-SNAPSHOT + 0.5.0 org.semanticweb.vlog4j vlog4j-graal - 0.5.0-SNAPSHOT + 0.5.0 org.semanticweb.vlog4j vlog4j-parser - 0.5.0-SNAPSHOT + 0.5.0 org.semanticweb.vlog4j vlog4j-client - 0.5.0-SNAPSHOT + 0.5.0 diff --git a/pom.xml b/pom.xml index 8e831b96f..54f123629 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 pom VLog4j diff 
--git a/vlog4j-client/pom.xml b/vlog4j-client/pom.xml index f6f9d5169..7bfe4fc43 100644 --- a/vlog4j-client/pom.xml +++ b/vlog4j-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-client diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index a1c646e97..b3c3183c3 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-core diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index 568c6e905..aa3132d08 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-examples diff --git a/vlog4j-graal/pom.xml b/vlog4j-graal/pom.xml index 0b922bcc5..a5b6724e3 100644 --- a/vlog4j-graal/pom.xml +++ b/vlog4j-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-graal diff --git a/vlog4j-owlapi/pom.xml b/vlog4j-owlapi/pom.xml index c4e79c45f..2bb0a6834 100644 --- a/vlog4j-owlapi/pom.xml +++ b/vlog4j-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-owlapi diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index ce8616874..14bce0278 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-parser diff --git a/vlog4j-rdf/pom.xml b/vlog4j-rdf/pom.xml index 1d69e2284..cd75da4e2 100644 --- a/vlog4j-rdf/pom.xml +++ b/vlog4j-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-rdf From 89137b8ed83113eed0763f520abed40d1a5b01f1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 6 Dec 2019 16:31:41 +0100 Subject: [PATCH 0453/1003] update Readme to new release version 0.5.0 --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e11ad17f7..2051a94ae 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of VLog4j is version 0.4.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of VLog4j is version 0.5.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` @@ -50,4 +50,4 @@ Development * The master branch may require a development version of VLog. Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. -* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. \ No newline at end of file +* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. 
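As a brief aside on the string-escaping change earlier in this series (the Serializer patch fixing #144): the behaviour is easiest to see in isolation. The following is a minimal, self-contained Java sketch, not part of any commit here; the class and method names (EscapeSketch, serialize) are invented for illustration and merely mirror the escape() and addQuotes() logic shown in the Serializer diff above.

```
// Standalone sketch of the escaping applied to xsd:string lexical values.
// Backslashes are escaped first, so the backslashes introduced by the later
// replacements are not escaped again; single quotes are left untouched,
// since the serializer only emits double-quoted strings.
public class EscapeSketch {

	static String escape(String s) {
		return s.replace("\\", "\\\\")
				.replace("\"", "\\\"")
				.replace("\t", "\\t")
				.replace("\b", "\\b")
				.replace("\n", "\\n")
				.replace("\r", "\\r")
				.replace("\f", "\\f");
	}

	// Corresponds to addQuotes(escape(...)) in the patched Serializer.
	static String serialize(String lexicalValue) {
		return "\"" + escape(lexicalValue) + "\"";
	}

	public static void main(String[] args) {
		// A lexical value similar to those in the round-trip tests above,
		// with a single quote added to show that it stays unescaped.
		String lexical = "_\"_\\_\n_\t_'_";
		// Prints: "_\"_\\_\n_\t_'_"
		System.out.println(serialize(lexical));
	}
}
```

The round-trip tests added in RuleParserTest (the parseLiteral_*_roundTrips methods) rely on exactly this property: feeding the serialized form back into RuleParser.parseLiteral must reproduce the original fact.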
From 0b311016129c06fdd9450b0de65891bcdbefebc4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 6 Dec 2019 16:32:21 +0100 Subject: [PATCH 0454/1003] update README to new release version 0.5.0 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2051a94ae..a40a9512e 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ The current release of VLog4j is version 0.5.0. The easiest way of using the lib org.semanticweb.vlog4j vlog4j-core - 0.4.0 + 0.5.0 ``` From 36afeacf15e1dd9c6264649359d8055ad027b917 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 6 Dec 2019 16:36:16 +0100 Subject: [PATCH 0455/1003] update README to link to our syntax in wiki --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a40a9512e..dbb5a647b 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ The current release of VLog4j is version 0.5.0. The easiest way of using the lib You need to use Java 1.8 or above. Available modules include: * **vlog4j-core**: essential data models for rules and facts, and essential reasoner functionality -* **vlog4j-parser**: support for processing knowledge bases in VLog4j syntax +* **vlog4j-parser**: support for processing knowledge bases in [VLog4j syntax](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar) * **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files * **vlog4j-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API @@ -41,7 +41,7 @@ Documentation * The module **vlog4j-examples** includes short example programs that demonstrate various features and use cases * The GitHub project **[VLog4j Example](https://github.com/knowsys/vlog4j-example)** shows how to use VLog4j in own Maven projects and can be used as a skeleton for own projects * [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. -* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online, with detailed information about vlog4j usage, the supported rule language, and related publications. +* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online, with detailed information about vlog4j usage, the supported rule language [examples](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-by-examples) and [grammar](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar), and related publications. 
Development ----------- From 55ca9fed74f28fdb5505b6efe5d8dd507e863e15 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 6 Dec 2019 18:44:57 +0100 Subject: [PATCH 0456/1003] fixed some javadoc errors --- .../vlog4j/client/picocli/ClientUtils.java | 2 + .../client/picocli/PrintQueryResults.java | 6 +- .../vlog4j/client/picocli/SaveModel.java | 8 +- .../client/picocli/SaveQueryResults.java | 8 +- .../vlog4j/core/model/api/Variable.java | 7 +- .../core/model/implementation/FactImpl.java | 13 +-- .../core/model/implementation/Serializer.java | 42 ++++----- .../vlog4j/core/reasoner/KnowledgeBase.java | 86 +++++++++---------- .../vlog4j/core/reasoner/Reasoner.java | 74 ++++++++-------- .../vlog4j/core/reasoner/ReasonerState.java | 12 ++- .../implementation/InMemoryDataSource.java | 35 ++++---- .../vlog4j/examples/ExamplesUtils.java | 8 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 2 +- .../parser/DataSourceDeclarationHandler.java | 5 +- 14 files changed, 152 insertions(+), 156 deletions(-) diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java index 35b10b2b0..c0f81099c 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java @@ -96,6 +96,8 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R * * @param queryAtom query to be answered * @param reasoner reasoner to query on + * + * @return number of answers to the given query */ public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Reasoner reasoner) { try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java index b9d656f3d..1fb824007 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java @@ -59,10 +59,10 @@ public PrintQueryResults(final boolean sizeOnly, final boolean complete) { } /** - * Check correct configuration of the class. @code{--print-query-result-size} - * and @code{--print-query-result} are mutually exclusive. + * Check correct configuration of the class. {@code --print-query-result-size} + * and {@code --print-query-result} are mutually exclusive. * - * @return @code{true} if configuration is valid. + * @return {@code true} if configuration is valid. */ public boolean isValid() { return !this.sizeOnly || !this.complete; diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java index 12be0e9d0..5d8fd08a7 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java @@ -63,10 +63,10 @@ public SaveModel(final boolean saveModel, final String outputDir) { } /** - * Check correct configuration of the class. If @code{--save-model} is true, - * then a non-empty @code{--output-model-directory} is required. + * Check correct configuration of the class. 
If {@code --save-model} is true, + * then a non-empty {@code --output-model-directory} is required. * - * @return @code{true} if configuration is valid. + * @return {@code true} if configuration is valid. */ public boolean isConfigurationValid() { return !this.saveModel || ((this.outputModelDirectory != null) && !this.outputModelDirectory.isEmpty()); @@ -75,7 +75,7 @@ public boolean isConfigurationValid() { /** * Check that the path to store the model is either non-existing or a directory. * - * @return @code{true} if conditions are satisfied. + * @return {@code true} if conditions are satisfied. */ public boolean isDirectoryValid() { final File file = new File(this.outputModelDirectory); diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java index ce0345bd0..b694cb532 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java @@ -64,10 +64,10 @@ public SaveQueryResults(final boolean saveResults, final String outputDir) { } /** - * Check correct configuration of the class. If @code{--save-query-results} is - * true, then a non-empty @code{--output-query-result-directory} is required. + * Check correct configuration of the class. If {@code --save-query-results} is + * true, then a non-empty {@code --output-query-result-directory} is required. * - * @return @code{true} if configuration is valid. + * @return {@code true} if configuration is valid. */ public boolean isConfigurationValid() { return !this.saveResults @@ -78,7 +78,7 @@ public boolean isConfigurationValid() { * Check that the path to store the query results is either non-existing or a * directory. * - * @return @code{true} if conditions are satisfied. + * @return {@code true} if conditions are satisfied. */ public boolean isDirectoryValid() { final File file = new File(this.outputQueryResultDirectory); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java index ba6a80e16..63b642a93 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java @@ -21,9 +21,10 @@ */ /** - * Interface for variables, i.e., terms of type {@link TermType#VARIABLE}. - * Variables are terms that can be quantified to create formulas that refer to - * some or all values of the domain. + * Interface for variables, i.e., terms of type + * {@link TermType#UNIVERSAL_VARIABLE} and + * {@link TermType#EXISTENTIAL_VARIABLE}. Variables are terms that can be + * quantified to create formulas that refer to some or all values of the domain. * * @author david.carral@tu-dresden.de * @author Markus Krötzsch diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index d22794133..e1712dd37 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -28,29 +28,30 @@ import org.semanticweb.vlog4j.core.model.api.Term; /** - * Standard implementation of the {@Fact} interface. 
+ * Standard implementation of the {@link Fact} interface. * * @author Markus Kroetzsch * */ public class FactImpl extends PositiveLiteralImpl implements Fact { - public FactImpl(Predicate predicate, List terms) { + public FactImpl(final Predicate predicate, final List terms) { super(predicate, terms); - for (Term t : terms) { - if (t.isVariable()) + for (final Term t : terms) { + if (t.isVariable()) { throw new IllegalArgumentException("Facts cannot contain variables."); + } } } @Override - public T accept(StatementVisitor statementVisitor) { + public T accept(final StatementVisitor statementVisitor) { return statementVisitor.visit(this); } @Override public String toString() { - return getSyntacticRepresentation(); + return this.getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index ad91d9aa8..b3c9fa9f7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -88,7 +88,7 @@ private Serializer() { /** * Creates a String representation of a given {@link Rule}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * @@ -100,7 +100,7 @@ public static String getString(final Rule rule) { /** * Creates a String representation of a given {@link Conjunction}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. */ @@ -121,7 +121,7 @@ public static String getString(final Conjunction conjunction) /** * Creates a String representation of a given {@link Literal}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ @@ -148,7 +148,7 @@ public static String getString(final Literal literal) { /** * Creates a String representation of a given {@link Fact}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ @@ -159,7 +159,7 @@ public static String getFactString(final Fact fact) { /** * Creates a String representation of a given {@link Constant}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param constant a {@link Constant} * @return String representation corresponding to a given {@link Constant}. */ @@ -171,7 +171,7 @@ public static String getString(final AbstractConstant constant) { * Creates a String representation corresponding to the name of a given * {@link LanguageStringConstant}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. 
@@ -184,7 +184,7 @@ public static String getConstantName(final LanguageStringConstant languageString * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} without an IRI. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -207,7 +207,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -220,7 +220,7 @@ public static String getConstantName(final DatatypeConstant datatypeConstant) { /** * Creates a String representation of a given {@link ExistentialVariable}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. @@ -232,7 +232,7 @@ public static String getString(final ExistentialVariable existentialVariable) { /** * Creates a String representation of a given {@link UniversalVariable}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. @@ -244,7 +244,7 @@ public static String getString(final UniversalVariable universalVariable) { /** * Creates a String representation of a given {@link NamedNull}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ @@ -255,7 +255,7 @@ public static String getString(final NamedNull namedNull) { /** * Creates a String representation of a given {@link Predicate}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ @@ -266,7 +266,7 @@ public static String getString(final Predicate predicate) { /** * Creates a String representation of a given {@link DataSourceDeclaration}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. @@ -279,7 +279,7 @@ public static String getString(final DataSourceDeclaration dataSourceDeclaration /** * Creates a String representation of a given {@link CsvFileDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki">. + * @see Rule syntax .. * * @param csvFileDataSource * @return String representation corresponding to a given @@ -292,7 +292,7 @@ public static String getString(final CsvFileDataSource csvFileDataSource) { /** * Creates a String representation of a given {@link RdfFileDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki">. + * @see Rule syntax .. 
* * * @param rdfFileDataSource @@ -307,7 +307,7 @@ public static String getString(final RdfFileDataSource rdfFileDataSource) { * Creates a String representation of a given * {@link SparqlQueryResultDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki">. + * @see Rule syntax . * * * @param dataSource @@ -335,14 +335,8 @@ private static String getIRIString(final String string) { } private static String escape(final String string) { - return string - .replace("\\", "\\\\") - .replace("\"", "\\\"") - .replace("\t", "\\t") - .replace("\b", "\\b") - .replace("\n", "\\n") - .replace("\r", "\\r") - .replace("\f", "\\f"); + return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") + .replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f"); // don't touch single quotes here since we only construct double-quoted strings } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 8e366147d..54c4a256f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -63,18 +63,18 @@ public class KnowledgeBase implements Iterable { */ private class AddStatementVisitor implements StatementVisitor { @Override - public Boolean visit(Fact statement) { - addFact(statement); + public Boolean visit(final Fact statement) { + KnowledgeBase.this.addFact(statement); return true; } @Override - public Boolean visit(Rule statement) { + public Boolean visit(final Rule statement) { return true; } @Override - public Boolean visit(DataSourceDeclaration statement) { + public Boolean visit(final DataSourceDeclaration statement) { KnowledgeBase.this.dataSourceDeclarations.add(statement); return true; } @@ -92,18 +92,18 @@ public Boolean visit(DataSourceDeclaration statement) { private class RemoveStatementVisitor implements StatementVisitor { @Override - public Boolean visit(Fact statement) { - removeFact(statement); + public Boolean visit(final Fact statement) { + KnowledgeBase.this.removeFact(statement); return true; } @Override - public Boolean visit(Rule statement) { + public Boolean visit(final Rule statement) { return true; } @Override - public Boolean visit(DataSourceDeclaration statement) { + public Boolean visit(final DataSourceDeclaration statement) { KnowledgeBase.this.dataSourceDeclarations.remove(statement); return true; } @@ -116,7 +116,7 @@ private class ExtractStatementsVisitor implements StatementVisitor { final ArrayList extracted = new ArrayList<>(); final Class ownType; - ExtractStatementsVisitor(Class type) { + ExtractStatementsVisitor(final Class type) { this.ownType = type; } @@ -126,7 +126,7 @@ ArrayList getExtractedStatements() { @SuppressWarnings("unchecked") @Override - public Void visit(Fact statement) { + public Void visit(final Fact statement) { if (this.ownType.equals(Fact.class)) { this.extracted.add((T) statement); } @@ -135,7 +135,7 @@ public Void visit(Fact statement) { @SuppressWarnings("unchecked") @Override - public Void visit(Rule statement) { + public Void visit(final Rule statement) { if (this.ownType.equals(Rule.class)) { this.extracted.add((T) statement); } @@ -144,7 +144,7 @@ public Void visit(Rule statement) { @SuppressWarnings("unchecked") @Override - public Void visit(DataSourceDeclaration statement) { + public Void visit(final DataSourceDeclaration statement) { 
if (this.ownType.equals(DataSourceDeclaration.class)) { this.extracted.add((T) statement); } @@ -181,7 +181,7 @@ public Void visit(DataSourceDeclaration statement) { * * @param listener */ - public void addListener(KnowledgeBaseListener listener) { + public void addListener(final KnowledgeBaseListener listener) { this.listeners.add(listener); } @@ -190,7 +190,7 @@ public void addListener(KnowledgeBaseListener listener) { * * @param listener */ - public void deleteListener(KnowledgeBaseListener listener) { + public void deleteListener(final KnowledgeBaseListener listener) { this.listeners.remove(listener); } @@ -199,11 +199,10 @@ public void deleteListener(KnowledgeBaseListener listener) { * Adds a single statement to the knowledge base. * * @param statement the statement to be added - * @return true, if the knowledge base has changed. */ - public void addStatement(Statement statement) { - if (doAddStatement(statement)) { - notifyListenersOnStatementAdded(statement); + public void addStatement(final Statement statement) { + if (this.doAddStatement(statement)) { + this.notifyListenersOnStatementAdded(statement); } } @@ -213,7 +212,7 @@ public void addStatement(Statement statement) { * @param statement the statement to be added * @return true, if the knowledge base has changed. */ - boolean doAddStatement(Statement statement) { + boolean doAddStatement(final Statement statement) { Validate.notNull(statement, "Statement cannot be Null!"); if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { this.statements.add(statement); @@ -227,16 +226,16 @@ boolean doAddStatement(Statement statement) { * * @param statements the statements to be added */ - public void addStatements(Collection statements) { + public void addStatements(final Collection statements) { final List addedStatements = new ArrayList<>(); for (final Statement statement : statements) { - if (doAddStatement(statement)) { + if (this.doAddStatement(statement)) { addedStatements.add(statement); } } - notifyListenersOnStatementsAdded(addedStatements); + this.notifyListenersOnStatementsAdded(addedStatements); } /** @@ -244,27 +243,26 @@ public void addStatements(Collection statements) { * * @param statements the statements to be added */ - public void addStatements(Statement... statements) { + public void addStatements(final Statement... statements) { final List addedStatements = new ArrayList<>(); for (final Statement statement : statements) { - if (doAddStatement(statement)) { + if (this.doAddStatement(statement)) { addedStatements.add(statement); } } - notifyListenersOnStatementsAdded(addedStatements); + this.notifyListenersOnStatementsAdded(addedStatements); } /** * Removes a single statement from the knowledge base. * - * @return true, if the knowledge base has changed. * @param statement the statement to remove */ - public void removeStatement(Statement statement) { - if (doRemoveStatement(statement)) { - notifyListenersOnStatementRemoved(statement); + public void removeStatement(final Statement statement) { + if (this.doRemoveStatement(statement)) { + this.notifyListenersOnStatementRemoved(statement); } } @@ -274,7 +272,7 @@ public void removeStatement(Statement statement) { * @param statement the statement to remove * @return true, if the knowledge base has changed. 
*/ - boolean doRemoveStatement(Statement statement) { + boolean doRemoveStatement(final Statement statement) { Validate.notNull(statement, "Statement cannot be Null!"); if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { @@ -289,16 +287,16 @@ boolean doRemoveStatement(Statement statement) { * * @param statements the statements to remove */ - public void removeStatements(Collection statements) { + public void removeStatements(final Collection statements) { final List removedStatements = new ArrayList<>(); for (final Statement statement : statements) { - if (doRemoveStatement(statement)) { + if (this.doRemoveStatement(statement)) { removedStatements.add(statement); } } - notifyListenersOnStatementsRemoved(removedStatements); + this.notifyListenersOnStatementsRemoved(removedStatements); } /** @@ -306,16 +304,16 @@ public void removeStatements(Collection statements) { * * @param statements the statements to remove */ - public void removeStatements(Statement... statements) { + public void removeStatements(final Statement... statements) { final List removedStatements = new ArrayList<>(); for (final Statement statement : statements) { - if (doRemoveStatement(statement)) { + if (this.doRemoveStatement(statement)) { removedStatements.add(statement); } } - notifyListenersOnStatementsRemoved(removedStatements); + this.notifyListenersOnStatementsRemoved(removedStatements); } private void notifyListenersOnStatementAdded(final Statement addedStatement) { @@ -332,13 +330,13 @@ private void notifyListenersOnStatementsAdded(final List addedStateme } } - private void notifyListenersOnStatementRemoved(Statement removedStatement) { + private void notifyListenersOnStatementRemoved(final Statement removedStatement) { for (final KnowledgeBaseListener listener : this.listeners) { listener.onStatementRemoved(removedStatement); } } - private void notifyListenersOnStatementsRemoved(List removedStatements) { + private void notifyListenersOnStatementsRemoved(final List removedStatements) { if (!removedStatements.isEmpty()) { for (final KnowledgeBaseListener listener : this.listeners) { listener.onStatementsRemoved(removedStatements); @@ -353,7 +351,7 @@ private void notifyListenersOnStatementsRemoved(List removedStatement * @return list of {@link Rule}s */ public List getRules() { - return getStatementsByType(Rule.class); + return this.getStatementsByType(Rule.class); } /** @@ -363,7 +361,7 @@ public List getRules() { * @return list of {@link Fact}s */ public List getFacts() { - return getStatementsByType(Fact.class); + return this.getStatementsByType(Fact.class); } /** @@ -374,10 +372,10 @@ public List getFacts() { * @return list of {@link DataSourceDeclaration}s */ public List getDataSourceDeclarations() { - return getStatementsByType(DataSourceDeclaration.class); + return this.getStatementsByType(DataSourceDeclaration.class); } - List getStatementsByType(Class type) { + List getStatementsByType(final Class type) { final ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); for (final Statement statement : this.statements) { statement.accept(visitor); @@ -391,7 +389,7 @@ List getStatementsByType(Class type) { * * @param fact the fact to add */ - void addFact(Fact fact) { + void addFact(final Fact fact) { final Predicate predicate = fact.getPredicate(); this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); this.factsByPredicate.get(predicate).add(fact); @@ -403,7 +401,7 @@ void addFact(Fact fact) { * * @param fact the fact to remove */ - void 
removeFact(Fact fact) { + void removeFact(final Fact fact) { final Predicate predicate = fact.getPredicate(); final Set facts = this.factsByPredicate.get(predicate); facts.remove(fact); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index f391bbe80..1da67d694 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -2,16 +2,17 @@ import java.io.IOException; +import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.TermType; +import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import karmaresearch.vlog.Atom; - /* * #%L * VLog4j Core Components @@ -50,17 +51,17 @@ * * The loaded reasoner can perform atomic queries on explicit and * implicit facts after calling {@link Reasoner#reason()}. Queries can provide - * an iterator for the results ({@link #answerQuery(Atom, boolean)}, or the - * results can be exported to a file - * ({@link #exportQueryAnswersToCsv(Atom, String, boolean)}).
    + * an iterator for the results ({@link #answerQuery(PositiveLiteral, boolean)}, + * or the results can be exported to a file + * ({@link #exportQueryAnswersToCsv(PositiveLiteral, String, boolean)}).
    *
    * Reasoning with various {@link Algorithm}s is supported, that can lead * to different sets of inferred facts and different termination behavior. In * some cases, reasoning with rules with existentially quantified variables - * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We - * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it - * leads to termination in more cases. To avoid non-termination, a reasoning - * timeout can be set ({@link Reasoner#setReasoningTimeout(Integer)}).
    + * ({@link ExistentialVariable}) may not terminate. We recommend reasoning with + * algorithm {@link Algorithm#RESTRICTED_CHASE}, as it leads to termination in + * more cases. To avoid non-termination, a reasoning timeout can be set + * ({@link Reasoner#setReasoningTimeout(Integer)}).
    * * @author Irina Dragoste * @@ -73,7 +74,7 @@ public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { * * @return a {@link VLogReasoner} instance. */ - public static Reasoner getInstance() { + static Reasoner getInstance() { final KnowledgeBase knowledgeBase = new KnowledgeBase(); return new VLogReasoner(knowledgeBase); } @@ -104,9 +105,9 @@ public static Reasoner getInstance() { /** * In some cases, reasoning with rules with existentially quantified variables - * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We - * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it - * leads to termination in more cases.
    + * ({@link ExistentialVariable}) may not terminate. We recommend reasoning with + * algorithm {@link Algorithm#RESTRICTED_CHASE}, as it leads to termination in + * more cases.
    * This method sets a timeout (in seconds) after which reasoning can be * artificially interrupted if it has not reached completion. * @@ -178,7 +179,7 @@ public static Reasoner getInstance() { * Checks whether the loaded rules and loaded fact EDB predicates are Acyclic, * Cyclic, or cyclicity cannot be determined. * - * @return + * @return the appropriate CyclicityResult. */ CyclicityResult checkForCycles(); @@ -259,9 +260,9 @@ public static Reasoner getInstance() { * knowledge base rules.
    *
    * In some cases, reasoning with rules with existentially quantified variables - * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We - * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it - * leads to termination in more cases.
    + * {@link ExistentialVariable} may not terminate. We recommend reasoning with + * algorithm {@link Algorithm#RESTRICTED_CHASE}, as it leads to termination in + * more cases.
    * To avoid non-termination, a reasoning timeout can be set * ({@link Reasoner#setReasoningTimeout(Integer)}).
    * @@ -281,11 +282,11 @@ public static Reasoner getInstance() { * the reasoner and the explicit facts materialised by the reasoner.
    * An answer to the query is the terms a fact that matches the {@code query}: * the fact predicate is the same as the {@code query} predicate, the - * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer - * fact at the same term position, and the {@link TermType#VARIABLE} terms of - * the {@code query} are matched by terms in the fact, either named - * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The - * same variable name identifies the same term in the answer fact.
    + * {@link Constant} terms of the {@code query} appear in the answer fact at the + * same term position, and the {@link Variable} terms of the {@code query} are + * matched by terms in the fact, either named ({@link Constant}) or anonymous + * ({@link NamedNull}). The same variable name identifies the same term in the + * answer fact.
    * A query answer is represented by a {@link QueryResult}. A query can have * multiple, distinct query answers. This method returns an Iterator over these * answers.
    @@ -319,12 +320,12 @@ public static Reasoner getInstance() { * @param query a {@link PositiveLiteral} representing the query to be * answered. * @param includeNulls if {@code true}, {@link QueryResult}s containing terms of - * type {@link TermType#NAMED_NULL} (representing anonymous + * type {@link NamedNull} (representing anonymous * individuals introduced to satisfy rule existentially * quantified variables) will be included. Otherwise, the * answers will only contain the {@link QueryResult}s with - * terms of type {@link TermType#CONSTANT} (representing - * named individuals). + * terms of type {@link Constant} (representing named + * individuals). * @return QueryResultIterator that iterates over distinct answers to the query. * It also contains the {@link Correctness} of the query answers. */ @@ -338,11 +339,11 @@ public static Reasoner getInstance() { *
    * An answer to the query is the terms a fact that matches the {@code query}: * the fact predicate is the same as the {@code query} predicate, the - * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer - * fact at the same term position, and the {@link TermType#VARIABLE} terms of - * the {@code query} are matched by terms in the fact, either named - * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The - * same variable name identifies the same term in the answer fact.
    + * {@link Constant} terms of the {@code query} appear in the answer fact at the + * same term position, and the {@link Variable} terms of the {@code query} are + * matched by terms in the fact, either named ({@link Constant}) or anonymous + * ({@link NamedNull}). The same variable name identifies the same term in the + * answer fact.
    * A query can have multiple, distinct query answers. Each answers is written on * a separate line in the given file. * @@ -353,12 +354,11 @@ public static Reasoner getInstance() { * represents a query answer, and it will contain the fact * term names as columns. * @param includeNulls if {@code true}, answers containing terms of type - * {@link TermType#NAMED_NULL} (representing anonymous - * individuals introduced to satisfy rule existentially - * quantified variables) will be included. Otherwise, the - * answers will only contain those with terms of type - * {@link TermType#CONSTANT} (representing named - * individuals). + * {@link NamedNull} (representing anonymous individuals + * introduced to satisfy rule existentially quantified + * variables) will be included. Otherwise, the answers will + * only contain those with terms of type {@link Constant} + * (representing named individuals). * * @throws IOException if an I/O error occurs regarding given file * ({@code csvFilePath)}. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java index 4fb79e735..5a30d7359 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java @@ -29,14 +29,12 @@ */ public enum ReasonerState { /** - * State a Reasoner is in before method {@link Reasoner#load()} has been called. - * Querying is not allowed in this state. + * State a Reasoner is in before loading. Querying is not allowed in this state. */ KB_NOT_LOADED("knowledge base not loaded"), /** - * State a Reasoner is in after method {@link Reasoner#load()} has been called, - * and before method {@link Reasoner#reason()} has been called. The Reasoner can - * be queried. + * State a Reasoner is in after loading, and before method + * {@link Reasoner#reason()} has been called. The Reasoner can be queried. */ KB_LOADED("knowledge base loaded"), @@ -65,13 +63,13 @@ public enum ReasonerState { private final String name; - private ReasonerState(String name) { + private ReasonerState(final String name) { this.name = name; } @Override public String toString() { - return name; + return this.name; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index e498cacf2..6a290f021 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -23,10 +23,11 @@ import java.util.Arrays; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.Fact; /** * A {@link DataSource} for representing a large number of facts that were - * generated in Java. Rather than making {@link Fact} objects for all of them, + * generated in Java. Rather than creating {@link Fact} objects for all of them, * the object will directly accept tuples of constant names that are internally * stored in a form that can be passed to the reasoner directly, thereby saving * memory and loading time. 
@@ -50,10 +51,10 @@ public class InMemoryDataSource implements DataSource { * @param arity the number of parameters in a fact from this source * @param initialCapacity the planned number of facts */ - public InMemoryDataSource(int arity, int initialCapacity) { + public InMemoryDataSource(final int arity, final int initialCapacity) { this.capacity = initialCapacity; this.arity = arity; - data = new String[initialCapacity][arity]; + this.data = new String[initialCapacity][arity]; } /** @@ -62,20 +63,20 @@ public InMemoryDataSource(int arity, int initialCapacity) { * * @param constantNames the string names of the constants in this fact */ - public void addTuple(String... constantNames) { - if (constantNames.length != arity) { - throw new IllegalArgumentException("This data source holds tuples of arity " + arity + public void addTuple(final String... constantNames) { + if (constantNames.length != this.arity) { + throw new IllegalArgumentException("This data source holds tuples of arity " + this.arity + ". Adding a tuple of size " + constantNames.length + " is not possible."); } - if (nextEmptyTuple == capacity) { - capacity = capacity * 2; - this.data = Arrays.copyOf(data, capacity); + if (this.nextEmptyTuple == this.capacity) { + this.capacity = this.capacity * 2; + this.data = Arrays.copyOf(this.data, this.capacity); } - data[nextEmptyTuple] = new String[arity]; - for (int i = 0; i < arity; i++) { - data[nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstantName(constantNames[i]); + this.data[this.nextEmptyTuple] = new String[this.arity]; + for (int i = 0; i < this.arity; i++) { + this.data[this.nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstantName(constantNames[i]); } - nextEmptyTuple++; + this.nextEmptyTuple++; } /** @@ -85,7 +86,7 @@ public void addTuple(String... constantNames) { * @return the data */ public String[][] getData() { - if (nextEmptyTuple == capacity) { + if (this.nextEmptyTuple == this.capacity) { return this.data; } else { return Arrays.copyOf(this.data, this.nextEmptyTuple); @@ -94,11 +95,11 @@ public String[][] getData() { @Override public String getSyntacticRepresentation() { - StringBuilder sb = new StringBuilder( + final StringBuilder sb = new StringBuilder( "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); for (int i = 0; i < this.getData().length; i++) { - for (int j = 0; j < data[i].length; j++) { - sb.append(data[i][j] + " "); + for (int j = 0; j < this.data[i].length; j++) { + sb.append(this.data[i][j] + " "); } sb.append("\n"); } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index a21ef2999..a75b5a35a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -100,8 +100,8 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R * Prints out the answers given by {@code reasoner} to the query * ({@code queryAtom}). 
* - * @param queryAtom query to be answered - * @param reasoner reasoner to query on + * @param queryString query to be answered + * @param reasoner reasoner to query on */ public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) { try { @@ -129,8 +129,8 @@ public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Rea * Returns the number of answers returned by {@code reasoner} to the query * ({@code queryAtom}). * - * @param queryAtom query to be answered - * @param reasoner reasoner to query on + * @param queryString query to be answered + * @param reasoner reasoner to query on */ public static int getQueryAnswerCount(final String queryString, final Reasoner reasoner) { try { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 055989044..f738449f4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -45,7 +45,7 @@ /** * This example shows how vlog4j-owlapi library (class * {@link OwlToRulesConverter}) can be used to transform an OWL ontology into - * vlog4j-core {@link Rule}s and {@link Atom}s. + * vlog4j-core {@link Rule}s and {@link Fact}s. * * @author Irina Dragoste * diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index 19ef07ba2..180b93053 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -23,6 +23,7 @@ import java.util.List; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -46,7 +47,7 @@ public interface DataSourceDeclarationHandler { * Source, or the number of arguments is invalid. * @return a {@link DataSource} instance corresponding to the given arguments. */ - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException; /** @@ -58,7 +59,7 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto * @throws ParsingException when the given number of Arguments is invalid for * the Data Source. 
*/ - static void validateNumberOfArguments(List arguments, int number) throws ParsingException { + static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { if (arguments.size() != number) { throw new ParsingException("Invalid number of arguments " + arguments.size() + " for Data Source declaration, expected " + number); From 6741fe8c4e16cc7ec2b8b230073dfde7e0c8e94b Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 6 Dec 2019 19:21:01 +0100 Subject: [PATCH 0457/1003] fixed some javadoc errors --- .../graal/GraalToVLog4JModelConverter.java | 8 ++--- .../vlog4j/parser/ParserConfiguration.java | 36 +++++++++---------- .../parser/javacc/SubParserFactory.java | 33 ++++++++--------- 3 files changed, 39 insertions(+), 38 deletions(-) diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java index bec53d7fb..fe44ed0b9 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java +++ b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java @@ -93,8 +93,8 @@ public static Fact convertAtomToFact(final fr.lirmm.graphik.graal.api.core.Atom * Atoms} into a {@link List} of {@link PositiveLiteral VLog4J * PositiveLiterals}. * - * @param literals A {@link List} of {@link fr.lirmm.graphik.graal.api.core.Atom - * Graal Atoms}. + * @param literals list of {@link fr.lirmm.graphik.graal.api.core.Atom Graal + * Atoms}. * @return A {@link List} of {@link PositiveLiteral VLog4J PositiveLiterals}. */ public static List convertAtoms(final List atoms) { @@ -109,8 +109,8 @@ public static List convertAtoms(final List convertAtomsToFacts(final List atoms) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index a3b16dcba..770c7fd16 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -26,6 +26,7 @@ import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -39,12 +40,12 @@ public class ParserConfiguration { /** * The registered data sources. */ - private HashMap dataSources = new HashMap<>(); + private final HashMap dataSources = new HashMap<>(); /** * The registered datatypes. */ - private HashMap datatypes = new HashMap<>(); + private final HashMap datatypes = new HashMap<>(); /** * Register a new (type of) Data Source. @@ -53,8 +54,8 @@ public class ParserConfiguration { * production of the rules grammar, corresponding to some {@link DataSource} * type. * - * @see <"https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar"> for the - * grammar. + * @see + * the grammar. * * @param name Name of the data source, as it appears in the declaring * directive. @@ -63,9 +64,9 @@ public class ParserConfiguration { * @throws IllegalArgumentException if the provided name is already registered. 
* @return this */ - public ParserConfiguration registerDataSource(String name, DataSourceDeclarationHandler handler) + public ParserConfiguration registerDataSource(final String name, final DataSourceDeclarationHandler handler) throws IllegalArgumentException { - Validate.isTrue(!dataSources.containsKey(name), "The Data Source \"%s\" is already registered.", name); + Validate.isTrue(!this.dataSources.containsKey(name), "The Data Source \"%s\" is already registered.", name); this.dataSources.put(name, handler); return this; @@ -87,9 +88,9 @@ public ParserConfiguration registerDataSource(String name, DataSourceDeclaration * * @return the Data Source instance. */ - public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String name, List args, + public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final String name, final List args, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler handler = dataSources.get(name); + final DataSourceDeclarationHandler handler = this.dataSources.get(name); if (handler == null) { throw new ParsingException("Data source \"" + name + "\" is not known."); @@ -104,8 +105,7 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String name * @param lexicalForm the (unescaped) lexical form of the constant. * @param languageTag the language tag, or null if not present. * @param the datatype, or null if not present. - * @note At most one of {@code languageTag} and {@code datatype} may be - * non-null. + * @pre At most one of {@code languageTag} and {@code datatype} may be non-null. * * @throws ParsingException when the lexical form is invalid for the * given data type. @@ -113,21 +113,21 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String name * {@code datatype} are non-null. * @return the {@link Constant} corresponding to the given arguments. */ - public Constant parseConstant(String lexicalForm, String languageTag, String datatype) + public Constant parseConstant(final String lexicalForm, final String languageTag, final String datatype) throws ParsingException, IllegalArgumentException { - Validate.isTrue(languageTag == null || datatype == null, + Validate.isTrue((languageTag == null) || (datatype == null), "A constant with a language tag may not explicitly specify a data type."); if (languageTag != null) { return Expressions.makeLanguageStringConstant(lexicalForm, languageTag); } else { - return parseDatatypeConstant(lexicalForm, datatype); + return this.parseDatatypeConstant(lexicalForm, datatype); } } - private Constant parseDatatypeConstant(String lexicalForm, String datatype) throws ParsingException { - String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); - DatatypeConstantHandler handler = datatypes.get(type); + private Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { + final String type = ((datatype != null) ? 
datatype : PrefixDeclarations.XSD_STRING); + final DatatypeConstantHandler handler = this.datatypes.get(type); if (handler != null) { return handler.createConstant(lexicalForm); @@ -148,9 +148,9 @@ private Constant parseDatatypeConstant(String lexicalForm, String datatype) thro * * @return this */ - public ParserConfiguration registerDatatype(String name, DatatypeConstantHandler handler) + public ParserConfiguration registerDatatype(final String name, final DatatypeConstantHandler handler) throws IllegalArgumentException { - Validate.isTrue(!datatypes.containsKey(name), "The Data type \"%s\" is already registered.", name); + Validate.isTrue(!this.datatypes.containsKey(name), "The Data type \"%s\" is already registered.", name); this.datatypes.put(name, handler); return this; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index 1b9deccaa..c4a012baf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.parser.javacc; +import java.io.ByteArrayInputStream; + /*- * #%L * vlog4j-parser @@ -21,7 +23,6 @@ */ import java.io.InputStream; -import java.io.ByteArrayInputStream; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -29,23 +30,23 @@ import org.semanticweb.vlog4j.parser.RuleParser; /** - * Factory for creating a SubParser sharing configuration, state, and - * prefixes, but with an independent input stream, to be used, e.g., - * for parsing arguments in data source declarations. + * Factory for creating a SubParser sharing configuration, state, and prefixes, + * but with an independent input stream, to be used, e.g., for parsing arguments + * in data source declarations. * * @author Maximilian Marx */ public class SubParserFactory { - private KnowledgeBase knowledgeBase; - private ParserConfiguration parserConfiguration; - private PrefixDeclarations prefixDeclarations; + private final KnowledgeBase knowledgeBase; + private final ParserConfiguration parserConfiguration; + private final PrefixDeclarations prefixDeclarations; /** * Construct a SubParserFactory. * - * @argument parser the parser instance to get the state from. + * @param parser the parser instance to get the state from. */ - SubParserFactory(JavaCCParser parser) { + SubParserFactory(final JavaCCParser parser) { this.knowledgeBase = parser.getKnowledgeBase(); this.prefixDeclarations = parser.getPrefixDeclarations(); this.parserConfiguration = parser.getParserConfiguration(); @@ -54,14 +55,14 @@ public class SubParserFactory { /** * Create a new parser with the specified state and given input. * - * @argument inputStream the input stream to parse. - * @argument encoding encoding of the input stream. + * @param inputStream the input stream to parse. + * @param encoding encoding of the input stream. * - * @return A new {@link JavaCCParser} bound to inputStream and - * with the specified parser state. + * @return A new {@link JavaCCParser} bound to inputStream and with the + * specified parser state. 
*/ public JavaCCParser makeSubParser(final InputStream inputStream, final String encoding) { - JavaCCParser subParser = new JavaCCParser(inputStream, encoding); + final JavaCCParser subParser = new JavaCCParser(inputStream, encoding); subParser.setKnowledgeBase(this.knowledgeBase); subParser.setPrefixDeclarations(this.prefixDeclarations); subParser.setParserConfiguration(this.parserConfiguration); @@ -70,10 +71,10 @@ public JavaCCParser makeSubParser(final InputStream inputStream, final String en } public JavaCCParser makeSubParser(final InputStream inputStream) { - return makeSubParser(inputStream, RuleParser.DEFAULT_STRING_ENCODING); + return this.makeSubParser(inputStream, RuleParser.DEFAULT_STRING_ENCODING); } public JavaCCParser makeSubParser(final String string) { - return makeSubParser(new ByteArrayInputStream(string.getBytes())); + return this.makeSubParser(new ByteArrayInputStream(string.getBytes())); } } From 7ae5eb3409d904af9e57e621e9ca385de00b466f Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 6 Dec 2019 19:54:36 +0100 Subject: [PATCH 0458/1003] change to snapshot version --- coverage/pom.xml | 14 +++++++------- pom.xml | 2 +- vlog4j-client/pom.xml | 2 +- vlog4j-core/pom.xml | 2 +- vlog4j-examples/pom.xml | 2 +- vlog4j-graal/pom.xml | 2 +- vlog4j-owlapi/pom.xml | 2 +- vlog4j-parser/pom.xml | 2 +- vlog4j-rdf/pom.xml | 2 +- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 0f2270cb3..16e01e7f8 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT coverage @@ -16,32 +16,32 @@ org.semanticweb.vlog4j vlog4j-core - 0.5.0 + 0.6.0-SNAPSHOT org.semanticweb.vlog4j vlog4j-rdf - 0.5.0 + 0.6.0-SNAPSHOT org.semanticweb.vlog4j vlog4j-owlapi - 0.5.0 + 0.6.0-SNAPSHOT org.semanticweb.vlog4j vlog4j-graal - 0.5.0 + 0.6.0-SNAPSHOT org.semanticweb.vlog4j vlog4j-parser - 0.5.0 + 0.6.0-SNAPSHOT org.semanticweb.vlog4j vlog4j-client - 0.5.0 + 0.6.0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 54f123629..e7754337d 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT pom VLog4j diff --git a/vlog4j-client/pom.xml b/vlog4j-client/pom.xml index 7bfe4fc43..0e7b14f00 100644 --- a/vlog4j-client/pom.xml +++ b/vlog4j-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-client diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index b3c3183c3..502f9053c 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-core diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index aa3132d08..1e23eb09e 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-examples diff --git a/vlog4j-graal/pom.xml b/vlog4j-graal/pom.xml index a5b6724e3..7487cf100 100644 --- a/vlog4j-graal/pom.xml +++ b/vlog4j-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-graal diff --git a/vlog4j-owlapi/pom.xml b/vlog4j-owlapi/pom.xml index 2bb0a6834..3433c9bee 100644 --- a/vlog4j-owlapi/pom.xml +++ b/vlog4j-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-owlapi diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index 14bce0278..b8a7b07f4 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ 
-8,7 +8,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-parser diff --git a/vlog4j-rdf/pom.xml b/vlog4j-rdf/pom.xml index cd75da4e2..45c74ed50 100644 --- a/vlog4j-rdf/pom.xml +++ b/vlog4j-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-rdf From 4480d3590ac591ccd7c21e5c5da1a8fd4a6e3eb7 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 10 Dec 2019 14:20:14 +0100 Subject: [PATCH 0459/1003] fix unit tests failing on Windows OS --- .../vlog4j/core/model/implementation/Serializer.java | 9 ++++++++- .../vlog4j/core/model/DataSourceDeclarationTest.java | 8 +++++--- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index b3c9fa9f7..a95e54529 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -334,7 +334,14 @@ private static String getIRIString(final String string) { } } - private static String escape(final String string) { + /** + * Escapes (with '\') special character occurrences in given {@code string}. The + * special characters are: "\", "'", "\t", "\b", "\n", "\r", "\f". + * + * @param string + * @return an escaped string + */ + public static String escape(final String string) { return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") .replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f"); // don't touch single quotes here since we only construct double-quoted strings diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index c3ebcd4fb..a2124804a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -90,8 +91,9 @@ public void toString_CsvFileDataSource() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); - final String expectedFilePath = relativeDirName + File.separator + fileName; - assertEquals("@source q[1]: load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); + final String expectedFilePath = Serializer.escape(relativeDirName + File.separator + fileName); + assertEquals("@source q[1]: load-csv(\"" + expectedFilePath + "\") .", + dataSourceDeclaration.toString()); } // FIXME: have String representation of files OS independent @@ -116,7 +118,7 @@ public void toString_RdfFileDataSource_relativePath() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new 
DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); - final String expectedFilePath = relativeDirName + File.separator + fileName; + final String expectedFilePath = Serializer.escape(relativeDirName + File.separator + fileName); assertEquals("@source q[1]: load-rdf(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); } } From d44efaf01cfd1a909961c5b56cf6e02e5c667eb8 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 10 Dec 2019 15:20:58 +0100 Subject: [PATCH 0460/1003] make Serializer.escape private --- .../core/model/implementation/Serializer.java | 48 ++++++++++++++++--- .../core/model/DataSourceDeclarationTest.java | 8 ++-- 2 files changed, 45 insertions(+), 11 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index a95e54529..0e37fd672 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -177,7 +177,7 @@ public static String getString(final AbstractConstant constant) { * {@link LanguageStringConstant}. */ public static String getConstantName(final LanguageStringConstant languageStringConstant) { - return addQuotes(escape(languageStringConstant.getString())) + AT + languageStringConstant.getLanguageTag(); + return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); } /** @@ -191,7 +191,7 @@ public static String getConstantName(final LanguageStringConstant languageString */ public static String getString(final DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return addQuotes(escape(datatypeConstant.getLexicalValue())); + return getString(datatypeConstant.getLexicalValue()); } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) @@ -213,7 +213,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { * {@link DatatypeConstant}. */ public static String getConstantName(final DatatypeConstant datatypeConstant) { - return addQuotes(escape(datatypeConstant.getLexicalValue())) + DOUBLE_CARET + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + addAngleBrackets(datatypeConstant.getDatatype()); } @@ -322,7 +322,7 @@ public static String getString(final SparqlQueryResultDataSource dataSource) { } private static String getFileString(final FileDataSource fileDataSource) { - return addQuotes(escape(fileDataSource.getFile().toString())); + return getString(fileDataSource.getFile().toString()); } private static String getIRIString(final String string) { @@ -335,13 +335,46 @@ private static String getIRIString(final String string) { } /** - * Escapes (with '\') special character occurrences in given {@code string}. The - * special characters are: "\", "'", "\t", "\b", "\n", "\r", "\f". + * Constructs the parseable, serialized representation of given {@code string}. + * Escapes (with {@code \}) special character occurrences in given + * {@code string}, and surrounds the result with double quotation marks + * ({@code "}). The special characters are: + *
      + *
+ * <ul>
+ * <li>{@code \}</li>
+ * <li>{@code "}</li>
+ * <li>{@code \t}</li>
+ * <li>{@code \b}</li>
+ * <li>{@code \n}</li>
+ * <li>{@code \r}</li>
+ * <li>{@code \f}</li>
+ * </ul>
        + * Example for {@code string = "\\a"}, the returned value is + * {@code string = "\"\\\\a\""} + * + * @param string + * @return an escaped string surrounded by {@code "}. + */ + public static String getString(final String string) { + return addQuotes(escape(string)); + } + + /** + * Escapes (with {@code \}) special character occurrences in given + * {@code string}. The special characters are: + *
          + *
+ * <ul>
+ * <li>{@code \}</li>
+ * <li>{@code "}</li>
+ * <li>{@code \t}</li>
+ * <li>{@code \b}</li>
+ * <li>{@code \n}</li>
+ * <li>{@code \r}</li>
+ * <li>{@code \f}</li>
+ * </ul>
            * * @param string * @return an escaped string */ - public static String escape(final String string) { + private static String escape(final String string) { return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") .replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f"); // don't touch single quotes here since we only construct double-quoted strings @@ -351,6 +384,7 @@ private static String addQuotes(final String string) { return QUOTE + string + QUOTE; } + private static String addAngleBrackets(final String string) { return LESS_THAN + string + MORE_THAN; } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index a2124804a..c02772192 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -91,8 +91,8 @@ public void toString_CsvFileDataSource() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); - final String expectedFilePath = Serializer.escape(relativeDirName + File.separator + fileName); - assertEquals("@source q[1]: load-csv(\"" + expectedFilePath + "\") .", + final String expectedFilePath = Serializer.getString(relativeDirName + File.separator + fileName); + assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } @@ -118,7 +118,7 @@ public void toString_RdfFileDataSource_relativePath() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); - final String expectedFilePath = Serializer.escape(relativeDirName + File.separator + fileName); - assertEquals("@source q[1]: load-rdf(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); + final String expectedFilePath = Serializer.getString(relativeDirName + File.separator + fileName); + assertEquals("@source q[1]: load-rdf(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } } From 0b9ecba4e65515312d55cc2bf516e78ac259e798 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 7 Jan 2020 13:51:36 +0100 Subject: [PATCH 0461/1003] Disable parallel test execution Parallel test execution seems to cause corrupted coverage files when building on Windows, so disable it. 
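For readability, here is a sketch of the Surefire configuration this change touches. The element names argLine, forkCount and reuseForks are inferred from the values visible in the pom.xml hunk below and are assumptions, not part of the patch; the patch itself only switches the fork count from 1C (one forked test JVM per CPU core) to a single fork:

<plugin>
	<groupId>org.apache.maven.plugins</groupId>
	<artifactId>maven-surefire-plugin</artifactId>
	<version>3.0.0-M4</version>
	<configuration>
		<argLine>${surefireArgLine}</argLine>
		<!-- previously 1C, i.e. one forked test JVM per CPU core; now a single fork -->
		<forkCount>1</forkCount>
		<reuseForks>true</reuseForks>
	</configuration>
</plugin>

Reusing a single fork means only one JVM writes JaCoCo coverage data at a time, which plausibly explains why this avoids the corrupted coverage files described above.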
--- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index e7754337d..95ee3ad64 100644 --- a/pom.xml +++ b/pom.xml @@ -233,7 +233,7 @@ 3.0.0-M4 ${surefireArgLine} - 1C + 1 true From af2d6f4dd0992206d7617d1e654573c779d915d7 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 7 Jan 2020 19:36:06 +0100 Subject: [PATCH 0462/1003] added methods to obtain facts --- .../vlog4j/core/reasoner/KnowledgeBase.java | 869 +++++++++--------- .../vlog4j/core/reasoner/Reasoner.java | 5 + .../reasoner/implementation/VLogReasoner.java | 50 + .../examples/SimpleReasoningExample.java | 1 - 4 files changed, 489 insertions(+), 436 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 54c4a256f..d63f9be70 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -1,435 +1,434 @@ -package org.semanticweb.vlog4j.core.reasoner; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * A knowledge base with rules, facts, and declarations for loading data from - * further sources. This is a "syntactic" object in that it represents some - * information that is not relevant for the semantics of reasoning, but that is - * needed to ensure faithful re-serialisation of knowledge bases loaded from - * files (e.g., preserving order). - * - * @author Markus Kroetzsch - * - */ -public class KnowledgeBase implements Iterable { - - private final Set listeners = new HashSet<>(); - - /** - * Auxiliary class to process {@link Statement}s when added to the knowledge - * base. Returns true if a statement was added successfully. 
- * - * @author Markus Kroetzsch - * - */ - private class AddStatementVisitor implements StatementVisitor { - @Override - public Boolean visit(final Fact statement) { - KnowledgeBase.this.addFact(statement); - return true; - } - - @Override - public Boolean visit(final Rule statement) { - return true; - } - - @Override - public Boolean visit(final DataSourceDeclaration statement) { - KnowledgeBase.this.dataSourceDeclarations.add(statement); - return true; - } - } - - private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); - - /** - * Auxiliary class to process {@link Statement}s when removed from the knowledge - * base. Returns true if a statement was removed successfully. - * - * @author Irina Dragoste - * - */ - private class RemoveStatementVisitor implements StatementVisitor { - - @Override - public Boolean visit(final Fact statement) { - KnowledgeBase.this.removeFact(statement); - return true; - } - - @Override - public Boolean visit(final Rule statement) { - return true; - } - - @Override - public Boolean visit(final DataSourceDeclaration statement) { - KnowledgeBase.this.dataSourceDeclarations.remove(statement); - return true; - } - } - - private final RemoveStatementVisitor removeStatementVisitor = new RemoveStatementVisitor(); - - private class ExtractStatementsVisitor implements StatementVisitor { - - final ArrayList extracted = new ArrayList<>(); - final Class ownType; - - ExtractStatementsVisitor(final Class type) { - this.ownType = type; - } - - ArrayList getExtractedStatements() { - return this.extracted; - } - - @SuppressWarnings("unchecked") - @Override - public Void visit(final Fact statement) { - if (this.ownType.equals(Fact.class)) { - this.extracted.add((T) statement); - } - return null; - } - - @SuppressWarnings("unchecked") - @Override - public Void visit(final Rule statement) { - if (this.ownType.equals(Rule.class)) { - this.extracted.add((T) statement); - } - return null; - } - - @SuppressWarnings("unchecked") - @Override - public Void visit(final DataSourceDeclaration statement) { - if (this.ownType.equals(DataSourceDeclaration.class)) { - this.extracted.add((T) statement); - } - return null; - } - } - - /** - * The primary storage for the contents of the knowledge base. - */ - private final LinkedHashSet statements = new LinkedHashSet<>(); - -// TODO support prefixes -// /** -// * Known prefixes that can be used to pretty-print the contents of the knowledge -// * base. We try to preserve user-provided prefixes found in files when loading -// * data. -// */ -// PrefixDeclarations prefixDeclarations; - - /** - * Index structure that organises all facts by their predicate. - */ - private final Map> factsByPredicate = new HashMap<>(); - - /** - * Index structure that holds all data source declarations of this knowledge - * base. - */ - private final Set dataSourceDeclarations = new HashSet<>(); - - /** - * Registers a listener for changes on the knowledge base - * - * @param listener - */ - public void addListener(final KnowledgeBaseListener listener) { - this.listeners.add(listener); - } - - /** - * Unregisters given listener from changes on the knowledge base - * - * @param listener - */ - public void deleteListener(final KnowledgeBaseListener listener) { - this.listeners.remove(listener); - - } - - /** - * Adds a single statement to the knowledge base. 
- * - * @param statement the statement to be added - */ - public void addStatement(final Statement statement) { - if (this.doAddStatement(statement)) { - this.notifyListenersOnStatementAdded(statement); - } - } - - /** - * Adds a single statement to the knowledge base. - * - * @param statement the statement to be added - * @return true, if the knowledge base has changed. - */ - boolean doAddStatement(final Statement statement) { - Validate.notNull(statement, "Statement cannot be Null!"); - if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { - this.statements.add(statement); - return true; - } - return false; - } - - /** - * Adds a collection of statements to the knowledge base. - * - * @param statements the statements to be added - */ - public void addStatements(final Collection statements) { - final List addedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doAddStatement(statement)) { - addedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsAdded(addedStatements); - } - - /** - * Adds a list of statements to the knowledge base. - * - * @param statements the statements to be added - */ - public void addStatements(final Statement... statements) { - final List addedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doAddStatement(statement)) { - addedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsAdded(addedStatements); - } - - /** - * Removes a single statement from the knowledge base. - * - * @param statement the statement to remove - */ - public void removeStatement(final Statement statement) { - if (this.doRemoveStatement(statement)) { - this.notifyListenersOnStatementRemoved(statement); - } - } - - /** - * Removes a single statement from the knowledge base. - * - * @param statement the statement to remove - * @return true, if the knowledge base has changed. - */ - boolean doRemoveStatement(final Statement statement) { - Validate.notNull(statement, "Statement cannot be Null!"); - - if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { - this.statements.remove(statement); - return true; - } - return false; - } - - /** - * Removes a collection of statements to the knowledge base. - * - * @param statements the statements to remove - */ - public void removeStatements(final Collection statements) { - final List removedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doRemoveStatement(statement)) { - removedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsRemoved(removedStatements); - } - - /** - * Removes a list of statements from the knowledge base. - * - * @param statements the statements to remove - */ - public void removeStatements(final Statement... 
statements) { - final List removedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doRemoveStatement(statement)) { - removedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsRemoved(removedStatements); - } - - private void notifyListenersOnStatementAdded(final Statement addedStatement) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementAdded(addedStatement); - } - } - - private void notifyListenersOnStatementsAdded(final List addedStatements) { - if (!addedStatements.isEmpty()) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementsAdded(addedStatements); - } - } - } - - private void notifyListenersOnStatementRemoved(final Statement removedStatement) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementRemoved(removedStatement); - } - } - - private void notifyListenersOnStatementsRemoved(final List removedStatements) { - if (!removedStatements.isEmpty()) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementsRemoved(removedStatements); - } - } - } - - /** - * Get the list of all rules that have been added to the knowledge base. The - * list is read-only and cannot be modified to add or delete rules. - * - * @return list of {@link Rule}s - */ - public List getRules() { - return this.getStatementsByType(Rule.class); - } - - /** - * Get the list of all facts that have been added to the knowledge base. The - * list is read-only and cannot be modified to add or delete facts. - * - * @return list of {@link Fact}s - */ - public List getFacts() { - return this.getStatementsByType(Fact.class); - } - - /** - * Get the list of all data source declarations that have been added to the - * knowledge base. The list is read-only and cannot be modified to add or delete - * facts. - * - * @return list of {@link DataSourceDeclaration}s - */ - public List getDataSourceDeclarations() { - return this.getStatementsByType(DataSourceDeclaration.class); - } - - List getStatementsByType(final Class type) { - final ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); - for (final Statement statement : this.statements) { - statement.accept(visitor); - } - return Collections.unmodifiableList(visitor.getExtractedStatements()); - } - - /** - * Add a single fact to the internal data structures. It is assumed that it has - * already been checked that this fact is not present yet. - * - * @param fact the fact to add - */ - void addFact(final Fact fact) { - final Predicate predicate = fact.getPredicate(); - this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); - this.factsByPredicate.get(predicate).add(fact); - } - - /** - * Removes a single fact from the internal data structure. It is assumed that it - * has already been checked that this fact is already present. - * - * @param fact the fact to remove - */ - void removeFact(final Fact fact) { - final Predicate predicate = fact.getPredicate(); - final Set facts = this.factsByPredicate.get(predicate); - facts.remove(fact); - if (facts.isEmpty()) { - this.factsByPredicate.remove(predicate); - } - } - - /** - * Returns all {@link Statement}s of this knowledge base. - * - * The result can be iterated over and will return statements in the original - * order. The collection is read-only and cannot be modified to add or delete - * statements. 
- * - * @return a collection of statements - */ - public Collection getStatements() { - return Collections.unmodifiableCollection(this.statements); - } - - @Override - public Iterator iterator() { - return Collections.unmodifiableCollection(this.statements).iterator(); - } - - Map> getFactsByPredicate() { - return this.factsByPredicate; - } - -} +package org.semanticweb.vlog4j.core.reasoner; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * A knowledge base with rules, facts, and declarations for loading data from + * further sources. This is a "syntactic" object in that it represents some + * information that is not relevant for the semantics of reasoning, but that is + * needed to ensure faithful re-serialisation of knowledge bases loaded from + * files (e.g., preserving order). + * + * @author Markus Kroetzsch + * + */ +public class KnowledgeBase implements Iterable { + + private final Set listeners = new HashSet<>(); + + /** + * Auxiliary class to process {@link Statement}s when added to the knowledge + * base. Returns true if a statement was added successfully. + * + * @author Markus Kroetzsch + * + */ + private class AddStatementVisitor implements StatementVisitor { + @Override + public Boolean visit(final Fact statement) { + KnowledgeBase.this.addFact(statement); + return true; + } + + @Override + public Boolean visit(final Rule statement) { + return true; + } + + @Override + public Boolean visit(final DataSourceDeclaration statement) { + KnowledgeBase.this.dataSourceDeclarations.add(statement); + return true; + } + } + + private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); + + /** + * Auxiliary class to process {@link Statement}s when removed from the knowledge + * base. Returns true if a statement was removed successfully. 
+ * + * @author Irina Dragoste + * + */ + private class RemoveStatementVisitor implements StatementVisitor { + + @Override + public Boolean visit(final Fact statement) { + KnowledgeBase.this.removeFact(statement); + return true; + } + + @Override + public Boolean visit(final Rule statement) { + return true; + } + + @Override + public Boolean visit(final DataSourceDeclaration statement) { + KnowledgeBase.this.dataSourceDeclarations.remove(statement); + return true; + } + } + + private final RemoveStatementVisitor removeStatementVisitor = new RemoveStatementVisitor(); + + private class ExtractStatementsVisitor implements StatementVisitor { + + final ArrayList extracted = new ArrayList<>(); + final Class ownType; + + ExtractStatementsVisitor(final Class type) { + this.ownType = type; + } + + ArrayList getExtractedStatements() { + return this.extracted; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final Fact statement) { + if (this.ownType.equals(Fact.class)) { + this.extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final Rule statement) { + if (this.ownType.equals(Rule.class)) { + this.extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final DataSourceDeclaration statement) { + if (this.ownType.equals(DataSourceDeclaration.class)) { + this.extracted.add((T) statement); + } + return null; + } + } + + /** + * The primary storage for the contents of the knowledge base. + */ + private final LinkedHashSet statements = new LinkedHashSet<>(); + +// TODO support prefixes +// /** +// * Known prefixes that can be used to pretty-print the contents of the knowledge +// * base. We try to preserve user-provided prefixes found in files when loading +// * data. +// */ +// PrefixDeclarations prefixDeclarations; + + /** + * Index structure that organises all facts by their predicate. + */ + private final Map> factsByPredicate = new HashMap<>(); + + /** + * Index structure that holds all data source declarations of this knowledge + * base. + */ + private final Set dataSourceDeclarations = new HashSet<>(); + + /** + * Registers a listener for changes on the knowledge base + * + * @param listener + */ + public void addListener(final KnowledgeBaseListener listener) { + this.listeners.add(listener); + } + + /** + * Unregisters given listener from changes on the knowledge base + * + * @param listener + */ + public void deleteListener(final KnowledgeBaseListener listener) { + this.listeners.remove(listener); + + } + + /** + * Adds a single statement to the knowledge base. + * + * @param statement the statement to be added + */ + public void addStatement(final Statement statement) { + if (this.doAddStatement(statement)) { + this.notifyListenersOnStatementAdded(statement); + } + } + + /** + * Adds a single statement to the knowledge base. + * + * @param statement the statement to be added + * @return true, if the knowledge base has changed. + */ + boolean doAddStatement(final Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { + this.statements.add(statement); + return true; + } + return false; + } + + /** + * Adds a collection of statements to the knowledge base. 
+ * + * @param statements the statements to be added + */ + public void addStatements(final Collection statements) { + final List addedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doAddStatement(statement)) { + addedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsAdded(addedStatements); + } + + /** + * Adds a list of statements to the knowledge base. + * + * @param statements the statements to be added + */ + public void addStatements(final Statement... statements) { + final List addedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doAddStatement(statement)) { + addedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsAdded(addedStatements); + } + + /** + * Removes a single statement from the knowledge base. + * + * @param statement the statement to remove + */ + public void removeStatement(final Statement statement) { + if (this.doRemoveStatement(statement)) { + this.notifyListenersOnStatementRemoved(statement); + } + } + + /** + * Removes a single statement from the knowledge base. + * + * @param statement the statement to remove + * @return true, if the knowledge base has changed. + */ + boolean doRemoveStatement(final Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + + if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { + this.statements.remove(statement); + return true; + } + return false; + } + + /** + * Removes a collection of statements to the knowledge base. + * + * @param statements the statements to remove + */ + public void removeStatements(final Collection statements) { + final List removedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doRemoveStatement(statement)) { + removedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsRemoved(removedStatements); + } + + /** + * Removes a list of statements from the knowledge base. + * + * @param statements the statements to remove + */ + public void removeStatements(final Statement... statements) { + final List removedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doRemoveStatement(statement)) { + removedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsRemoved(removedStatements); + } + + private void notifyListenersOnStatementAdded(final Statement addedStatement) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementAdded(addedStatement); + } + } + + private void notifyListenersOnStatementsAdded(final List addedStatements) { + if (!addedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsAdded(addedStatements); + } + } + } + + private void notifyListenersOnStatementRemoved(final Statement removedStatement) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementRemoved(removedStatement); + } + } + + private void notifyListenersOnStatementsRemoved(final List removedStatements) { + if (!removedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsRemoved(removedStatements); + } + } + } + + /** + * Get the list of all rules that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete rules. 
+ * + * @return list of {@link Rule}s + */ + public List getRules() { + return this.getStatementsByType(Rule.class); + } + + /** + * Get the list of all facts that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete facts. + * + * @return list of {@link Fact}s + */ + public List getFacts() { + return this.getStatementsByType(Fact.class); + } + + /** + * Get the list of all data source declarations that have been added to the + * knowledge base. The list is read-only and cannot be modified to add or delete + * facts. + * + * @return list of {@link DataSourceDeclaration}s + */ + public List getDataSourceDeclarations() { + return this.getStatementsByType(DataSourceDeclaration.class); + } + + List getStatementsByType(final Class type) { + final ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); + for (final Statement statement : this.statements) { + statement.accept(visitor); + } + return Collections.unmodifiableList(visitor.getExtractedStatements()); + } + + /** + * Add a single fact to the internal data structures. It is assumed that it has + * already been checked that this fact is not present yet. + * + * @param fact the fact to add + */ + void addFact(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); + this.factsByPredicate.get(predicate).add(fact); + } + + /** + * Removes a single fact from the internal data structure. It is assumed that it + * has already been checked that this fact is already present. + * + * @param fact the fact to remove + */ + void removeFact(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + final Set facts = this.factsByPredicate.get(predicate); + facts.remove(fact); + if (facts.isEmpty()) { + this.factsByPredicate.remove(predicate); + } + } + + /** + * Returns all {@link Statement}s of this knowledge base. + * + * The result can be iterated over and will return statements in the original + * order. The collection is read-only and cannot be modified to add or delete + * statements. + * + * @return a collection of statements + */ + public Collection getStatements() { + return Collections.unmodifiableCollection(this.statements); + } + + @Override + public Iterator iterator() { + return Collections.unmodifiableCollection(this.statements).iterator(); + } + + Map> getFactsByPredicate() { + return this.factsByPredicate; + } + +} \ No newline at end of file diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 1da67d694..cf80c90d3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -1,6 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner; import java.io.IOException; +import java.io.OutputStream; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; @@ -86,6 +87,10 @@ static Reasoner getInstance() { */ KnowledgeBase getKnowledgeBase(); + void getKbFacts(OutputStream stream) throws IOException; + + void getKbFacts(String filePath) throws IOException; + /** * Sets the algorithm that will be used for reasoning over the knowledge base. 
* If no algorithm is set, the default algorithm is diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 260db93e2..e37dd9612 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,6 +1,8 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; +import java.io.FileOutputStream; import java.io.IOException; +import java.io.OutputStream; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; @@ -20,6 +22,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; @@ -808,4 +811,51 @@ ReasonerState getReasonerState() { void setReasonerState(ReasonerState reasonerState) { this.reasonerState = reasonerState; } + + @Override + public void getKbFacts(OutputStream stream) throws IOException { + // TODO Auto-generated method stub + HashSet headLiterals = new HashSet(); + for (Rule rule : this.knowledgeBase.getRules()) { + for (PositiveLiteral positiveLiteral : rule.getHead()) { + headLiterals.add(positiveLiteral); + } + } + for (PositiveLiteral positiveliteral : headLiterals) { + try (final QueryResultIterator queryAnswers = this.answerQuery(positiveliteral, true)) { + while (queryAnswers.hasNext()) { + QueryResult queryAnswer = queryAnswers.next(); + stream.write((positiveliteral.getPredicate().getName() + + queryAnswer.getTerms().toString().replace("[", "(").replace("]", ").") + "\n") + .getBytes()); + } + } + } + stream.close(); + + } + + @Override + public void getKbFacts(String filePath) throws IOException { + // TODO Auto-generated method stub + OutputStream stream = new FileOutputStream(filePath); + HashSet headLiterals = new HashSet(); + for (Rule rule : this.knowledgeBase.getRules()) { + for (PositiveLiteral positiveLiteral : rule.getHead()) { + headLiterals.add(positiveLiteral); + } + } + for (PositiveLiteral positiveliteral : headLiterals) { + try (final QueryResultIterator queryAnswers = this.answerQuery(positiveliteral, true)) { + while (queryAnswers.hasNext()) { + QueryResult queryAnswer = queryAnswers.next(); + stream.write((positiveliteral.getPredicate().getName() + + queryAnswer.getTerms().toString().replace("[", "(").replace("]", ").") + "\n") + .getBytes()); + } + } + } + stream.close(); + } + } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 5b5875a63..b269047b7 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -78,7 +78,6 @@ public static void main(final String[] args) throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - /* Execute some queries */ ExamplesUtils.printOutQueryAnswers("address(?Org, ?Street, ?ZIP, ?City)", reasoner); 
ExamplesUtils.printOutQueryAnswers("locatedIn(?place, europe)", reasoner); From a9c9df8a52eef17c31bcb602ffafde14caba5469 Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 8 Jan 2020 13:17:59 +0100 Subject: [PATCH 0463/1003] added javadoc and methods for facts --- .../vlog4j/core/reasoner/Reasoner.java | 12 +++++++++++ .../reasoner/implementation/VLogReasoner.java | 21 ++----------------- 2 files changed, 14 insertions(+), 19 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index cf80c90d3..f04b197ea 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -87,8 +87,20 @@ static Reasoner getInstance() { */ KnowledgeBase getKnowledgeBase(); + /** + * Exports all the facts of the knowledge base to an OutputStream. This includes + * the added facts and the inferred facts as well. + * + * @param an OutpumStream for the facts to be written to. + */ void getKbFacts(OutputStream stream) throws IOException; + /** + * Exports all the facts of the knowledge base to a desired file. This includes + * the added facts and the inferred facts as well. + * + * @param a String of the file path for the facts to be written to. + */ void getKbFacts(String filePath) throws IOException; /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index e37dd9612..e6b75b2b4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -839,23 +839,6 @@ public void getKbFacts(OutputStream stream) throws IOException { public void getKbFacts(String filePath) throws IOException { // TODO Auto-generated method stub OutputStream stream = new FileOutputStream(filePath); - HashSet headLiterals = new HashSet(); - for (Rule rule : this.knowledgeBase.getRules()) { - for (PositiveLiteral positiveLiteral : rule.getHead()) { - headLiterals.add(positiveLiteral); - } - } - for (PositiveLiteral positiveliteral : headLiterals) { - try (final QueryResultIterator queryAnswers = this.answerQuery(positiveliteral, true)) { - while (queryAnswers.hasNext()) { - QueryResult queryAnswer = queryAnswers.next(); - stream.write((positiveliteral.getPredicate().getName() - + queryAnswer.getTerms().toString().replace("[", "(").replace("]", ").") + "\n") - .getBytes()); - } - } - } - stream.close(); + getKbFacts(stream); } - -} +} \ No newline at end of file From e4806c3791889b09cffa913710aafff1f562062d Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 8 Jan 2020 13:22:23 +0100 Subject: [PATCH 0464/1003] added new line --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index e6b75b2b4..b5dc3c20b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -841,4 +841,4 @@ public void 
getKbFacts(String filePath) throws IOException { OutputStream stream = new FileOutputStream(filePath); getKbFacts(stream); } -} \ No newline at end of file +} From ce698ffb0f1ed77a62bc2450cbd01c04e09bc55f Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 9 Jan 2020 17:08:32 +0100 Subject: [PATCH 0465/1003] fixed javadoc --- .../semanticweb/vlog4j/core/reasoner/Reasoner.java | 12 +++++------- .../core/reasoner/implementation/VLogReasoner.java | 8 +++----- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index f04b197ea..65180c59f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -88,20 +88,18 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * Exports all the facts of the knowledge base to an OutputStream. This includes - * the added facts and the inferred facts as well. + * Exports the inferred facts of the knowledge base to an OutputStream. * - * @param an OutpumStream for the facts to be written to. + * @param an OutputStream for the facts to be written to. */ - void getKbFacts(OutputStream stream) throws IOException; + void writeInferredFacts(OutputStream stream) throws IOException; /** - * Exports all the facts of the knowledge base to a desired file. This includes - * the added facts and the inferred facts as well. + * Exports the inferred facts of the knowledge base to a desired file. * * @param a String of the file path for the facts to be written to. */ - void getKbFacts(String filePath) throws IOException; + void writeInferredFacts(String filePath) throws IOException; /** * Sets the algorithm that will be used for reasoning over the knowledge base. 
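For orientation, the renamed interface above can be exercised end to end as in the following sketch. The tiny p/q knowledge base, the class name, and the output file name are made up for illustration; only the reasoner calls and the writeInferredFacts name come from the patches in this series (the method is renamed again further down, first to writeFacts and later to writeInferences).

    import java.io.IOException;

    import org.semanticweb.vlog4j.core.model.api.Predicate;
    import org.semanticweb.vlog4j.core.model.api.Variable;
    import org.semanticweb.vlog4j.core.model.implementation.Expressions;
    import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
    import org.semanticweb.vlog4j.core.reasoner.Reasoner;
    import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

    public class WriteInferredFactsSketch {
        public static void main(final String[] args) throws IOException {
            final Predicate p = Expressions.makePredicate("p", 1);
            final Predicate q = Expressions.makePredicate("q", 1);
            final Variable x = Expressions.makeUniversalVariable("x");

            // Tiny, made-up knowledge base: one fact p(c) and one rule q(?x) :- p(?x).
            final KnowledgeBase kb = new KnowledgeBase();
            kb.addStatements(
                    Expressions.makeFact(p, Expressions.makeAbstractConstant("c")),
                    Expressions.makeRule(Expressions.makePositiveLiteral(q, x),
                            Expressions.makePositiveLiteral(p, x)));

            try (final Reasoner reasoner = new VLogReasoner(kb)) {
                reasoner.reason();
                // Method added by this patch (formerly getKbFacts); file name is arbitrary.
                reasoner.writeInferredFacts("inferred-facts.txt");
            }
        }
    }
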
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b5dc3c20b..53eb1f706 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -813,8 +813,7 @@ void setReasonerState(ReasonerState reasonerState) { } @Override - public void getKbFacts(OutputStream stream) throws IOException { - // TODO Auto-generated method stub + public void writeInferredFacts(OutputStream stream) throws IOException { HashSet headLiterals = new HashSet(); for (Rule rule : this.knowledgeBase.getRules()) { for (PositiveLiteral positiveLiteral : rule.getHead()) { @@ -836,9 +835,8 @@ public void getKbFacts(OutputStream stream) throws IOException { } @Override - public void getKbFacts(String filePath) throws IOException { - // TODO Auto-generated method stub + public void writeInferredFacts(String filePath) throws IOException { OutputStream stream = new FileOutputStream(filePath); - getKbFacts(stream); + writeInferredFacts(stream); } } From 2c2c82f35d9e9a4341139a15a67e49b3bd709d00 Mon Sep 17 00:00:00 2001 From: alloka Date: Sat, 18 Jan 2020 15:25:53 +0100 Subject: [PATCH 0466/1003] added some changes regarding method and serializer --- pom.xml | 153 +++++++++--------- .../core/model/implementation/Serializer.java | 8 +- .../reasoner/implementation/VLogReasoner.java | 33 ++-- vlog4j-examples/allola.txt | 23 +++ 4 files changed, 126 insertions(+), 91 deletions(-) create mode 100644 vlog4j-examples/allola.txt diff --git a/pom.xml b/pom.xml index 95ee3ad64..c521ee208 100644 --- a/pom.xml +++ b/pom.xml @@ -15,9 +15,8 @@ https://github.com/knowsys/vlog4j - + vlog4j-core vlog4j-rdf vlog4j-examples @@ -141,7 +140,7 @@ - org.eclipse.m2e lifecycle-mapping @@ -205,9 +204,9 @@ - org.jacoco - jacoco-maven-plugin - 0.8.5 + org.jacoco + jacoco-maven-plugin + 0.8.5 @@ -217,7 +216,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -228,82 +227,80 @@ - org.apache.maven.plugins - maven-surefire-plugin - 3.0.0-M4 - - ${surefireArgLine} - 1 - true - + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M4 + + ${surefireArgLine} + 1 + true + - - org.eluder.coveralls - coveralls-maven-plugin - 4.3.0 - - - coverage/target/site/jacoco-aggregate/jacoco.xml - - - - - - javax.xml.bind - jaxb-api - 2.3.1 - - + + org.eluder.coveralls + coveralls-maven-plugin + 4.3.0 + + + coverage/target/site/jacoco-aggregate/jacoco.xml + + + + + + javax.xml.bind + jaxb-api + 2.3.1 + + - - org.jacoco - jacoco-maven-plugin - - - prepare-agent - - prepare-agent - - - surefireArgLine - - - - default-cli - - report - - test - - - ${project.reporting.outputDirectory}/jacoco-ut - - - - - - - **/javacc/JavaCCParser.class - **/javacc/JavaCCParserConstants.class - **/javacc/JavaCCParserTokenManager.class - **/javacc/JavaCharStream.class - **/javacc/ParseException.class - **/javacc/SimpleCharStream.class - **/javacc/Token.class - **/javacc/TokenMgrError.class - - + + org.jacoco + jacoco-maven-plugin + + + prepare-agent + + prepare-agent + + + surefireArgLine + + + + default-cli + + report + + test + + + ${project.reporting.outputDirectory}/jacoco-ut + + + + + + + **/javacc/JavaCCParser.class + **/javacc/JavaCCParserConstants.class + **/javacc/JavaCCParserTokenManager.class + **/javacc/JavaCharStream.class + 
**/javacc/ParseException.class + **/javacc/SimpleCharStream.class + **/javacc/Token.class + **/javacc/TokenMgrError.class + + - + org.apache.maven.plugins maven-javadoc-plugin ${maven.javadoc.version} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 0e37fd672..5fd525b96 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.implementation; +import java.util.List; + import org.semanticweb.vlog4j.core.model.api.AbstractConstant; /*- @@ -384,9 +386,13 @@ private static String addQuotes(final String string) { return QUOTE + string + QUOTE; } - private static String addAngleBrackets(final String string) { return LESS_THAN + string + MORE_THAN; } + public static String getFactOutput(Predicate predicate, List terms) { + return predicate.getName() + terms.toString().replace(OPENING_BRACKET, OPENING_PARENTHESIS) + .replace(CLOSING_BRACKET, CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 53eb1f706..478464216 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -22,15 +22,16 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; @@ -814,21 +815,29 @@ void setReasonerState(ReasonerState reasonerState) { @Override public void writeInferredFacts(OutputStream stream) throws IOException { - HashSet headLiterals = new HashSet(); + HashSet toBeQueriedHeadPredicates = new HashSet(); + for (Fact fact : this.knowledgeBase.getFacts()) { + stream.write((fact.toString() + "\n").getBytes()); + } for (Rule rule : this.knowledgeBase.getRules()) { - for (PositiveLiteral positiveLiteral : rule.getHead()) { - headLiterals.add(positiveLiteral); + for (Literal literal : rule.getHead()) { + toBeQueriedHeadPredicates.add(literal.getPredicate()); } } - for (PositiveLiteral positiveliteral : headLiterals) { - try (final QueryResultIterator queryAnswers = this.answerQuery(positiveliteral, true)) 
{ - while (queryAnswers.hasNext()) { - QueryResult queryAnswer = queryAnswers.next(); - stream.write((positiveliteral.getPredicate().getName() - + queryAnswer.getTerms().toString().replace("[", "(").replace("]", ").") + "\n") - .getBytes()); - } + for (Predicate predicate : toBeQueriedHeadPredicates) { + ArrayList tobeGroundedVariables = new ArrayList(); + for (int i = 0; i < predicate.getArity(); i++) { + tobeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); } + final QueryResultIterator answers = this + .answerQuery(Expressions.makePositiveLiteral(predicate, tobeGroundedVariables), true); + answers.forEachRemaining(queryAnswer -> { + try { + stream.write(Serializer.getFactOutput(predicate, queryAnswer.getTerms()).getBytes()); + } catch (IOException e) { + e.printStackTrace(); + } + }); } stream.close(); diff --git a/vlog4j-examples/allola.txt b/vlog4j-examples/allola.txt new file mode 100644 index 000000000..92837f326 --- /dev/null +++ b/vlog4j-examples/allola.txt @@ -0,0 +1,23 @@ +location(germany, europe) . +location(uk, europe) . +location(saxony, germany) . +location(dresden, saxony) . +city(dresden) . +country(germany) . +country(uk) . +university(tudresden, germany) . +university(uoxford, uk) . +streetAddress(tudresden, "Mommsenstraße 9", "01069", "Dresden") . +zipLocation("01069", dresden) . +locatedIn(germany, europe) . +locatedIn(uk, europe) . +locatedIn(dresden, saxony) . +locatedIn(saxony, germany) . +locatedIn(dresden, germany) . +locatedIn(dresden, europe) . +locatedIn(saxony, europe) . +locatedIn(_2_4_0, uk) . +locatedIn(_2_4_0, europe) . +address(tudresden, "Mommsenstraße 9", "01069", dresden) . +address(uoxford, _2_2_0, _2_3_0, _2_4_0) . +inEuropeOutsideGermany(uoxford) . From 5059dc161aa9341d379f3696781edb84f4827cd1 Mon Sep 17 00:00:00 2001 From: alloka Date: Sat, 18 Jan 2020 15:26:33 +0100 Subject: [PATCH 0467/1003] added some changes regarding method and serializer --- vlog4j-examples/allola.txt | 23 ----------------------- 1 file changed, 23 deletions(-) delete mode 100644 vlog4j-examples/allola.txt diff --git a/vlog4j-examples/allola.txt b/vlog4j-examples/allola.txt deleted file mode 100644 index 92837f326..000000000 --- a/vlog4j-examples/allola.txt +++ /dev/null @@ -1,23 +0,0 @@ -location(germany, europe) . -location(uk, europe) . -location(saxony, germany) . -location(dresden, saxony) . -city(dresden) . -country(germany) . -country(uk) . -university(tudresden, germany) . -university(uoxford, uk) . -streetAddress(tudresden, "Mommsenstraße 9", "01069", "Dresden") . -zipLocation("01069", dresden) . -locatedIn(germany, europe) . -locatedIn(uk, europe) . -locatedIn(dresden, saxony) . -locatedIn(saxony, germany) . -locatedIn(dresden, germany) . -locatedIn(dresden, europe) . -locatedIn(saxony, europe) . -locatedIn(_2_4_0, uk) . -locatedIn(_2_4_0, europe) . -address(tudresden, "Mommsenstraße 9", "01069", dresden) . -address(uoxford, _2_2_0, _2_3_0, _2_4_0) . -inEuropeOutsideGermany(uoxford) . 
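The export logic added in the two patches above boils down to querying every rule-head predicate with a tuple of fresh universal variables matching its arity and then serialising each answer. A condensed, stand-alone sketch of that idea follows; the helper name printExtension is invented, the import locations are assumed to match the packages used elsewhere in vlog4j-core, and error handling as well as the Serializer call are left out.

    import java.util.ArrayList;
    import java.util.List;

    import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
    import org.semanticweb.vlog4j.core.model.api.Predicate;
    import org.semanticweb.vlog4j.core.model.api.Term;
    import org.semanticweb.vlog4j.core.model.implementation.Expressions;
    import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator;
    import org.semanticweb.vlog4j.core.reasoner.Reasoner;

    public class HeadPredicateQuerySketch {

        /** Prints every materialised tuple of the given predicate, one per line. */
        static void printExtension(final Reasoner reasoner, final Predicate predicate) {
            // Build the query atom predicate(?X0, ..., ?Xn-1), one fresh variable per position.
            final List<Term> freshVariables = new ArrayList<>();
            for (int i = 0; i < predicate.getArity(); i++) {
                freshVariables.add(Expressions.makeUniversalVariable("X" + i));
            }
            final PositiveLiteral query = Expressions.makePositiveLiteral(predicate, freshVariables);
            // "true" also returns tuples that contain anonymous individuals (named nulls).
            try (final QueryResultIterator answers = reasoner.answerQuery(query, true)) {
                answers.forEachRemaining(answer -> System.out.println(predicate.getName() + answer.getTerms()));
            }
        }
    }
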
From bd20d6418b4346aa0098230ba03139f6f1486b8b Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 24 Jan 2020 02:38:28 +0100 Subject: [PATCH 0468/1003] added unit test --- .../reasoner/implementation/VLogReasoner.java | 12 +- .../vlog4j/syntax/parser/RuleParserTest.java | 104 +++++++++++++++++- 2 files changed, 106 insertions(+), 10 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 478464216..7992dafb5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -816,14 +816,15 @@ void setReasonerState(ReasonerState reasonerState) { @Override public void writeInferredFacts(OutputStream stream) throws IOException { HashSet toBeQueriedHeadPredicates = new HashSet(); - for (Fact fact : this.knowledgeBase.getFacts()) { - stream.write((fact.toString() + "\n").getBytes()); - } for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { toBeQueriedHeadPredicates.add(literal.getPredicate()); } } + for (Fact fact : this.knowledgeBase.getFacts()) { + if (!toBeQueriedHeadPredicates.contains(fact.getPredicate())) + stream.write((fact.toString() + "\n").getBytes()); + } for (Predicate predicate : toBeQueriedHeadPredicates) { ArrayList tobeGroundedVariables = new ArrayList(); for (int i = 0; i < predicate.getArity(); i++) { @@ -834,13 +835,14 @@ public void writeInferredFacts(OutputStream stream) throws IOException { answers.forEachRemaining(queryAnswer -> { try { stream.write(Serializer.getFactOutput(predicate, queryAnswer.getTerms()).getBytes()); - } catch (IOException e) { + } + + catch (IOException e) { e.printStackTrace(); } }); } stream.close(); - } @Override diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 971202610..6a62099eb 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -19,27 +19,43 @@ * limitations under the License. 
* #L% */ - -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; - +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileOutputStream; +import java.io.FileReader; +import java.io.IOException; +import java.io.OutputStream; +import java.net.URL; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import org.junit.Test; import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; @@ -438,4 +454,82 @@ public void testCustomDatatype() throws ParsingException { DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; assertEquals(constant, result); } + + @Test + public void testGetFacts() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + final DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + "?var wdt:P31 wd:Q5 ."); + final Predicate predicate1 = Expressions.makePredicate("country", 1); + final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); + final DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + "?var wdt:P31 wd:Q5 ."); + final Predicate predicate2 = Expressions.makePredicate("inEuropeOutsideGermany", 1); + final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); + final DataSource dataSource3 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var2", + "?var2 wdt:P31 wd:Q5 ."); + final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, dataSource3); + final Predicate predicate4 = Expressions.makePredicate("city", 1); + final DataSourceDeclaration dataSourceDeclaration4 = new DataSourceDeclarationImpl(predicate4, dataSource2); + RuleParser.parseInto(kb, fact.toString() + "."); + RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); + 
RuleParser.parseInto(kb, dataSourceDeclaration2.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration3.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration4.toString()); + final String rules = "location(germany,europe). \n" // + + "location(saxony,germany). \n" // + + "location(dresden,saxony). \n" // + + "locatedIn(Egypt,Africa). \n" // + + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // + + "city(dresden). \n" // + + "country(germany). \n" // + + "university(tudresden, germany). \n" // + + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // + + "zipLocation(\"01069\", dresden) . \n" // + + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // + + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . \n" // + + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP, ?City) . \n" + + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) . \n"; + RuleParser.parseInto(kb, rules); + final String facts = "location(germany,europe). \n" // + + "location(saxony,germany). \n" // + + "location(dresden,saxony). \n" // + + "location(germany, europe) . \n" // + + "locatedIn(Egypt, Africa). \n" // + + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // + + "city(dresden). \n" // + + "country(germany). \n" // + + "university(tudresden, germany). \n" // + + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // + + "zipLocation(\"01069\", dresden) . \n" // + + "locatedIn(germany, europe) . \n" // + + "locatedIn(dresden, saxony) . \n" // + + "locatedIn(saxony, germany) . \n" // + + "locatedIn(dresden, germany) . \n" // + + "locatedIn(dresden, europe) . \n" // + + "locatedIn(saxony, europe) . \n" // + + "address(tudresden, \"Mommsenstraße 9\", \"01069\", dresden) . \n" + + "() . 
\n"; + KnowledgeBase kb2 = new KnowledgeBase(); + KnowledgeBase kb3 = new KnowledgeBase(); + RuleParser.parseInto(kb2, facts); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + File file = new File("test.txt"); + OutputStream stream = new FileOutputStream(file); + reasoner.writeInferredFacts(stream); + stream.flush(); + BufferedReader input = new BufferedReader(new FileReader(file)); + String factString = ""; + while ((factString = input.readLine()) != null) { + if (!factString.contains("_")) + RuleParser.parseInto(kb3, factString); + } + input.close(); + assertEquals(new HashSet(kb2.getFacts()), new HashSet(kb3.getFacts())); + file.delete(); + + } + + } } From 49830490a745122612b5ab1c540f262422812c2a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 27 Jan 2020 12:00:48 +0100 Subject: [PATCH 0469/1003] rename file --- .../implementation/ExtensionSizeTest.java | 206 ------------------ ...SizeTest.java => QueryAnswerSizeTest.java} | 0 2 files changed, 206 deletions(-) delete mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/{QuerySizeTest.java => QueryAnswerSizeTest.java} (100%) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java deleted file mode 100644 index 8e16694b0..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java +++ /dev/null @@ -1,206 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertEquals; - -import java.io.IOException; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; - -public class ExtensionSizeTest { - - private static final Predicate predP = Expressions.makePredicate("P", 1); - private static final Predicate predQ = Expressions.makePredicate("Q", 1); - private static final Predicate predR = Expressions.makePredicate("R", 2); - private static final Variable x = Expressions.makeUniversalVariable("x"); - private static final Variable y = Expressions.makeExistentialVariable("y"); - private static final Constant c = Expressions.makeAbstractConstant("c"); - private static final Constant d = Expressions.makeAbstractConstant("d"); - private static final Constant e = Expressions.makeAbstractConstant("e"); - private static final Constant f = Expressions.makeAbstractConstant("f"); - - private static final PositiveLiteral Px = Expressions.makePositiveLiteral(predP, x); - private static final PositiveLiteral Qx = Expressions.makePositiveLiteral(predQ, x); - private static final PositiveLiteral Qy = Expressions.makePositiveLiteral(predQ, y); - private static final PositiveLiteral Rxy = Expressions.makePositiveLiteral(predR, x, y); - private static final Conjunction conRxyQy = Expressions.makePositiveConjunction(Rxy, Qy); - private static final Conjunction conPx = Expressions.makeConjunction(Px); - - private static final Rule QxPx = Expressions.makeRule(Qx, Px); - private static final Rule RxyQyPx = Expressions.makeRule(conRxyQy, conPx); - - private static final Fact factPc = Expressions.makeFact(predP, c); - private static final Fact factPd = Expressions.makeFact(predP, d); - - private static final Fact factQe = Expressions.makeFact(predQ, e); - private static final Fact factQf = Expressions.makeFact(predQ, f); - - private static final PositiveLiteral Rdy = Expressions.makePositiveLiteral(predR, d, y); - private static final PositiveLiteral Rxd = Expressions.makePositiveLiteral(predR, x, d); - private static final PositiveLiteral Rxe = Expressions.makePositiveLiteral(predR, x, e); - - @Test - public void noFactsnoRules() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(0, reasoner.getExtensionSize(Px)); - } - } - - @Test - public void noFactsUniversalRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(QxPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(0, reasoner.getExtensionSize(Px)); - assertEquals(0, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void noFactsExistentialRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(0, reasoner.getExtensionSize(Px)); - assertEquals(0, 
reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void pFactsNoRules() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(0, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void pFactsUniversalRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, QxPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(2, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void pFactsExistentialRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(2, reasoner.getExtensionSize(Qx)); - assertEquals(2, reasoner.getExtensionSize(Qx)); - assertEquals(2, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void qFactsUniversalRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factQe, factQf, RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(0, reasoner.getExtensionSize(Px)); - assertEquals(2, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void qFactsExistentialRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factQe, factQf, RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(0, reasoner.getExtensionSize(Px)); - assertEquals(2, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void pFactsQFactsUniversalRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, factQe, factQf, QxPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(4, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void pFactsQFactsExistentialRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(4, reasoner.getExtensionSize(Qx)); - assertEquals(2, reasoner.getExtensionSize(Rxy)); - - assertEquals(2, reasoner.getExtensionSize(Rdy)); - assertEquals(2, reasoner.getExtensionSize(Rxe)); - } - } - - @Test - public void pFactsQFactsExistentialAndUniversalRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(6, reasoner.getExtensionSize(Qx)); - assertEquals(2, reasoner.getExtensionSize(Rxy)); - - assertEquals(2, reasoner.getExtensionSize(Rdy)); - assertEquals(2, 
reasoner.getExtensionSize(Rxd)); - } - } -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java similarity index 100% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java From aa58e430663726a04584adf6c6f52925b3eb87e5 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 27 Jan 2020 12:01:06 +0100 Subject: [PATCH 0470/1003] delete unused import --- .../main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java | 1 - 1 file changed, 1 deletion(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 906d3c368..3e4f13e69 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -22,7 +22,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Iterator; import java.util.List; import org.apache.log4j.ConsoleAppender; From b6cea1e44767de6c29959fbeb55a45c491991925 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 27 Jan 2020 12:15:31 +0100 Subject: [PATCH 0471/1003] rename method; update javadoc --- .../vlog4j/core/reasoner/Reasoner.java | 56 ++----------------- .../reasoner/implementation/VLogReasoner.java | 16 ------ 2 files changed, 6 insertions(+), 66 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index e37561483..67de43fcf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -330,60 +330,19 @@ public static Reasoner getInstance() { */ QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls); - // TODO add examples to query javadoc - /** - * Evaluates an atomic ({@code query}), and returns the number of implicit facts - * loaded into the reasoner and the explicit facts materialised by the reasoner, - * including nulls.
            - * An answer to the query is the terms a fact that matches the {@code query}: - * the fact predicate is the same as the {@code query} predicate, the - * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer - * fact at the same term position, and the {@link TermType#VARIABLE} terms of - * the {@code query} are matched by terms in the fact, either named - * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The - * same variable name identifies the same term in the answer fact.
            - * A query answer is represented by a {@link QueryResult}. A query can have - * multiple, distinct query answers. This method returns an Iterator over these - * answers.
- *
- * Depending on the state of the reasoning (materialisation) and its
- * {@link KnowledgeBase}, the answers can have a different {@link Correctness}
- * ({@link QueryResultIterator#getCorrectness()}):
- * • If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current
- * knowledge base has completed, and the query answers are guaranteed to be
- * correct.
- * • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
- * to be sound, but may be incomplete. This can happen
- *   • when materialisation has not completed ({@link Reasoner#reason()} returns
- *     {@code false}),
- *   • or when the knowledge base was modified after reasoning, and the
- *     materialisation does not reflect the current knowledge base.
- *     Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain
- *     complete query answers with respect to the current knowledge base.
- * • If {@link Correctness#INCORRECT}, the results may be incomplete, and some
- * results may be unsound. This can happen when the knowledge base was modified
- * and the reasoner materialisation is no longer consistent with the current
- * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required,
- * in order to obtain correct query answers.
- *
            * An answer to the query is the terms a fact that matches the {@code query}: * the fact predicate is the same as the {@code query} predicate, the @@ -392,9 +351,6 @@ public static Reasoner getInstance() { * the {@code query} are matched by terms in the fact, either named * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The * same variable name identifies the same term in the answer fact.
            - * A query answer is represented by a {@link QueryResult}. A query can have - * multiple, distinct query answers. This method returns an Iterator over these - * answers.
            * * Depending on the state of the reasoning (materialisation) and its * {@link KnowledgeBase}, the answers can have a different {@link Correctness} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index cb0b75f75..f6c6d2ca2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -621,22 +621,6 @@ public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { return result; } - @Override - public long getExtensionSize(PositiveLiteral literal) { - validateNotClosed(); - validateKBLoaded("Querying is not alowed before reasoner is loaded!"); - - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(literal); - - long result = 0; - try { - result = this.vLog.getExtensionSize(this.vLog.getPredicateId(vLogAtom.getPredicate())); - } catch (NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } - return result; - } - @Override public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { From 4d8626809f5c738bb611de39063c94baa81fca48 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 27 Jan 2020 15:44:43 +0100 Subject: [PATCH 0472/1003] add test --- .../implementation/QueryAnswerSizeTest.java | 37 ++++++++++++++++--- 1 file changed, 31 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index 0c2699f32..b320f1e34 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -36,7 +36,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -public class QuerySizeTest { +public class QueryAnswerSizeTest { private static final Predicate predP = Expressions.makePredicate("P", 1); private static final Predicate predQ = Expressions.makePredicate("Q", 1); @@ -51,12 +51,17 @@ public class QuerySizeTest { private static final PositiveLiteral Px = Expressions.makePositiveLiteral(predP, x); private static final PositiveLiteral Qx = Expressions.makePositiveLiteral(predQ, x); private static final PositiveLiteral Qy = Expressions.makePositiveLiteral(predQ, y); + private static final PositiveLiteral Rxx = Expressions.makePositiveLiteral(predR, x, x); private static final PositiveLiteral Rxy = Expressions.makePositiveLiteral(predR, x, y); + private static final PositiveLiteral Ryy = Expressions.makePositiveLiteral(predR, y, y); + private static final Conjunction conRxyQy = Expressions.makePositiveConjunction(Rxy, Qy); + private static final Conjunction conRxxRxyRyy = Expressions.makePositiveConjunction(Rxx, Rxy, Ryy); private static final Conjunction conPx = Expressions.makeConjunction(Px); - private static final Rule ruleQxPx = Expressions.makeRule(Qx, Px); + private static final Rule QxPx = Expressions.makeRule(Qx, Px); private static final Rule RxyQyPx = Expressions.makeRule(conRxyQy, conPx); + 
private static final Rule RxxRxyRyyPx = Expressions.makeRule(conRxxRxyRyy, conPx); private static final Fact factPc = Expressions.makeFact(predP, c); private static final Fact factPd = Expressions.makeFact(predP, d); @@ -83,7 +88,7 @@ public void noFactsnoRules() throws IOException { @Test public void noFactsUniversalRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(ruleQxPx); + kb.addStatement(QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); assertEquals(0, reasoner.queryAnswerSize(Px, true)); @@ -119,7 +124,7 @@ public void pFactsNoRules() throws IOException { @Test public void pFactsUniversalRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, ruleQxPx); + kb.addStatements(factPc, factPd, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); assertEquals(2, reasoner.queryAnswerSize(Px, true)); @@ -188,7 +193,7 @@ public void qFactsExistentialRule() throws IOException { @Test public void pFactsQFactsUniversalRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, factQe, factQf, ruleQxPx); + kb.addStatements(factPc, factPd, factQe, factQf, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); assertEquals(2, reasoner.queryAnswerSize(Px)); @@ -234,7 +239,7 @@ public void pFactsQFactsExistentialRule() throws IOException { @Test public void pFactsQFactsExistentialAndUniversalRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, factQe, factQf, ruleQxPx, RxyQyPx); + kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); assertEquals(2, reasoner.queryAnswerSize(Px)); @@ -259,4 +264,24 @@ public void pFactsQFactsExistentialAndUniversalRule() throws IOException { } } + @Test + public void pFactsLiteralWithSameVariables() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, RxxRxyRyyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + + assertEquals(4, reasoner.queryAnswerSize(Rxx, true)); + assertEquals(2, reasoner.queryAnswerSize(Rxx, false)); + + assertEquals(6, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(2, reasoner.queryAnswerSize(Rxy, false)); + + assertEquals(4, reasoner.queryAnswerSize(Ryy, true)); + assertEquals(2, reasoner.queryAnswerSize(Ryy, false)); + + } + } } From e4fdbfa41dcdabe1a86ba7647e9044a0fe2dbefd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Larry=20Gonz=C3=A1lez?= Date: Mon, 27 Jan 2020 16:36:33 +0100 Subject: [PATCH 0473/1003] fix typo --- .../org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index f3b3129c2..20a82119b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -73,7 +73,7 @@ public static void main(final String[] args) throws ParsingException, IOExceptio + "@prefix wdqs: ." // + "@prefix dbp: ." 
// + "@source dbpResult[2] : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // - + "@source wdResult[2]) : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + + "@source wdResult[2] : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + "% Rules:\n" // + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage)." // + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage)." // From 52a86d9b5a34693557830afacab0084d9903d1d0 Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 30 Jan 2020 17:44:46 +0100 Subject: [PATCH 0474/1003] added some changes --- .../core/model/implementation/Serializer.java | 6 ++-- .../vlog4j/core/reasoner/Reasoner.java | 8 ++--- .../reasoner/implementation/VLogReasoner.java | 35 ++++++++++--------- .../vlog4j/syntax/parser/RuleParserTest.java | 2 +- 4 files changed, 27 insertions(+), 24 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 5fd525b96..a089d9815 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -391,8 +391,10 @@ private static String addAngleBrackets(final String string) { } public static String getFactOutput(Predicate predicate, List terms) { - return predicate.getName() + terms.toString().replace(OPENING_BRACKET, OPENING_PARENTHESIS) - .replace(CLOSING_BRACKET, CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); + return getIRIString(predicate.getName()) + + terms.toString().replace(terms.toString().substring(0, 1), OPENING_PARENTHESIS).replace( + terms.toString().substring(terms.toString().length() - 1, terms.toString().length()), + CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 65180c59f..b69847a2c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -88,18 +88,18 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * Exports the inferred facts of the knowledge base to an OutputStream. + * Exports all the facts of the knowledge base to an OutputStream. * * @param an OutputStream for the facts to be written to. */ - void writeInferredFacts(OutputStream stream) throws IOException; + void writeFacts(OutputStream stream) throws IOException; /** - * Exports the inferred facts of the knowledge base to a desired file. + * Exports all the facts of the knowledge base to a desired file. * * @param a String of the file path for the facts to be written to. */ - void writeInferredFacts(String filePath) throws IOException; + void writeFacts(String filePath) throws IOException; /** * Sets the algorithm that will be used for reasoning over the knowledge base. 
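The getFactString helper introduced in the patch above ultimately produces one line per fact: the predicate name, the comma-separated term representations in parentheses, and the statement separator, e.g. locatedIn(dresden, saxony) . A stand-alone sketch of that formatting step, using plain strings instead of the library's Predicate and Term types purely for illustration:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class FactLineSketch {

        /**
         * Formats one fact line roughly the way the patched getFactString does:
         * name, comma-separated term representations in parentheses, then " .".
         */
        static String factLine(final String predicateName, final List<String> termRepresentations) {
            return termRepresentations.stream()
                    .collect(Collectors.joining(", ", predicateName + "(", ") ."));
        }

        public static void main(final String[] args) {
            // Prints: locatedIn(dresden, saxony) .
            System.out.println(factLine("locatedIn", Arrays.asList("dresden", "saxony")));
        }
    }
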
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 7992dafb5..c4510263d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -814,8 +814,8 @@ void setReasonerState(ReasonerState reasonerState) { } @Override - public void writeInferredFacts(OutputStream stream) throws IOException { - HashSet toBeQueriedHeadPredicates = new HashSet(); + public void writeFacts(OutputStream stream) throws IOException { + Set toBeQueriedHeadPredicates = new HashSet(); for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { toBeQueriedHeadPredicates.add(literal.getPredicate()); @@ -826,28 +826,29 @@ public void writeInferredFacts(OutputStream stream) throws IOException { stream.write((fact.toString() + "\n").getBytes()); } for (Predicate predicate : toBeQueriedHeadPredicates) { - ArrayList tobeGroundedVariables = new ArrayList(); + List tobeGroundedVariables = new ArrayList(); for (int i = 0; i < predicate.getArity(); i++) { tobeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); } - final QueryResultIterator answers = this - .answerQuery(Expressions.makePositiveLiteral(predicate, tobeGroundedVariables), true); - answers.forEachRemaining(queryAnswer -> { - try { - stream.write(Serializer.getFactOutput(predicate, queryAnswer.getTerms()).getBytes()); - } + try (final QueryResultIterator answers = this + .answerQuery(Expressions.makePositiveLiteral(predicate, tobeGroundedVariables), true)) { + answers.forEachRemaining(queryAnswer -> { + try { + stream.write(Serializer.getFactOutput(predicate, queryAnswer.getTerms()).getBytes()); + } catch (IOException e) { + throw new RuntimeException(); + } + }); + + } - catch (IOException e) { - e.printStackTrace(); - } - }); } - stream.close(); } @Override - public void writeInferredFacts(String filePath) throws IOException { - OutputStream stream = new FileOutputStream(filePath); - writeInferredFacts(stream); + public void writeFacts(String filePath) throws IOException { + try (OutputStream stream = new FileOutputStream(filePath)) { + writeFacts(stream); + } } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 6a62099eb..2b7078e0d 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -517,7 +517,7 @@ public void testGetFacts() throws ParsingException, IOException { reasoner.reason(); File file = new File("test.txt"); OutputStream stream = new FileOutputStream(file); - reasoner.writeInferredFacts(stream); + reasoner.writeFacts(stream); stream.flush(); BufferedReader input = new BufferedReader(new FileReader(file)); String factString = ""; From e3f34a4da5c5fd868e3269480c25fcc0b0c52c71 Mon Sep 17 00:00:00 2001 From: alloka Date: Sat, 1 Feb 2020 23:31:43 +0100 Subject: [PATCH 0475/1003] added facts from datasources --- .../reasoner/implementation/VLogReasoner.java | 4 ++ .../vlog4j/syntax/parser/RuleParserTest.java | 45 +++++-------------- 2 files changed, 16 insertions(+), 33 deletions(-) diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index c4510263d..581e12a1b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -821,10 +821,14 @@ public void writeFacts(OutputStream stream) throws IOException { toBeQueriedHeadPredicates.add(literal.getPredicate()); } } + for (DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) { + toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); + } for (Fact fact : this.knowledgeBase.getFacts()) { if (!toBeQueriedHeadPredicates.contains(fact.getPredicate())) stream.write((fact.toString() + "\n").getBytes()); } + for (Predicate predicate : toBeQueriedHeadPredicates) { List tobeGroundedVariables = new ArrayList(); for (int i = 0; i < predicate.getArity(); i++) { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 2b7078e0d..4e8c55cb3 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -52,8 +52,10 @@ import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; @@ -458,50 +460,27 @@ public void testCustomDatatype() throws ParsingException { @Test public void testGetFacts() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - final DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", - "?var wdt:P31 wd:Q5 ."); - final Predicate predicate1 = Expressions.makePredicate("country", 1); - final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); - final DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", - "?var wdt:P31 wd:Q5 ."); - final Predicate predicate2 = Expressions.makePredicate("inEuropeOutsideGermany", 1); - final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); - final DataSource dataSource3 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var2", - "?var2 wdt:P31 wd:Q5 ."); - final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, dataSource3); - final Predicate predicate4 = Expressions.makePredicate("city", 1); - final DataSourceDeclaration dataSourceDeclaration4 = new DataSourceDeclarationImpl(predicate4, dataSource2); + final InMemoryDataSource locations = new InMemoryDataSource(2, 3); + 
locations.addTuple("germany", "europe"); + locations.addTuple("saxony", "germany"); + locations.addTuple("dresden", "saxony"); RuleParser.parseInto(kb, fact.toString() + "."); - RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); - RuleParser.parseInto(kb, dataSourceDeclaration2.toString()); - RuleParser.parseInto(kb, dataSourceDeclaration3.toString()); - RuleParser.parseInto(kb, dataSourceDeclaration4.toString()); - final String rules = "location(germany,europe). \n" // - + "location(saxony,germany). \n" // - + "location(dresden,saxony). \n" // - + "locatedIn(Egypt,Africa). \n" // + final String sharedFacts = "locatedIn(Egypt,Africa). \n" // + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // + "city(dresden). \n" // + "country(germany). \n" // + "university(tudresden, germany). \n" // + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // - + "zipLocation(\"01069\", dresden) . \n" // - + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // + + "zipLocation(\"01069\", dresden) . \n"; + final String rules = sharedFacts + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . \n" // + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP, ?City) . \n" + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) . \n"; RuleParser.parseInto(kb, rules); - final String facts = "location(germany,europe). \n" // + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); + final String facts = sharedFacts + "location(dresden,saxony). \n" // + + "location(germany,europe). \n" // + "location(saxony,germany). \n" // - + "location(dresden,saxony). \n" // - + "location(germany, europe) . \n" // - + "locatedIn(Egypt, Africa). \n" // - + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // - + "city(dresden). \n" // - + "country(germany). \n" // - + "university(tudresden, germany). \n" // - + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // - + "zipLocation(\"01069\", dresden) . \n" // + "locatedIn(germany, europe) . \n" // + "locatedIn(dresden, saxony) . \n" // + "locatedIn(saxony, germany) . 
\n" // From 5ee1b2e2a4a6b7ef9b7c33a90b42c34aa5959627 Mon Sep 17 00:00:00 2001 From: alloka Date: Sat, 1 Feb 2020 23:38:32 +0100 Subject: [PATCH 0476/1003] removed unused imports --- .../semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 6 ------ 1 file changed, 6 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 4e8c55cb3..b1f6c702b 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -29,7 +29,6 @@ import java.io.FileReader; import java.io.IOException; import java.io.OutputStream; -import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -38,13 +37,10 @@ import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; @@ -52,11 +48,9 @@ import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; import org.semanticweb.vlog4j.parser.ParserConfiguration; From 59f6bab8454a4dc0230dd84a6fdfa1786ab743e6 Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 7 Feb 2020 11:17:15 +0100 Subject: [PATCH 0477/1003] added some changes --- .../core/model/implementation/Serializer.java | 20 ++++++++++++++----- .../vlog4j/core/reasoner/Reasoner.java | 4 ++-- .../reasoner/implementation/VLogReasoner.java | 13 ++++++------ .../vlog4j/syntax/parser/RuleParserTest.java | 2 +- 4 files changed, 24 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index a089d9815..41314fda7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -390,11 +390,21 @@ private static String addAngleBrackets(final String string) { return LESS_THAN + string + MORE_THAN; } - public static String getFactOutput(Predicate predicate, List terms) { - return 
getIRIString(predicate.getName()) - + terms.toString().replace(terms.toString().substring(0, 1), OPENING_PARENTHESIS).replace( - terms.toString().substring(terms.toString().length() - 1, terms.toString().length()), - CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); + public static String getFactString(Predicate predicate, List terms) { + StringBuilder stringBuilder = new StringBuilder(""); + stringBuilder.append(getIRIString(predicate.getName())).append(OPENING_PARENTHESIS); + boolean first = true; + for (Term term : terms) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + final String string = term.getSyntacticRepresentation(); + stringBuilder.append(string); + } + stringBuilder.append(CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); + return stringBuilder.toString(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index b69847a2c..598fea327 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -92,14 +92,14 @@ static Reasoner getInstance() { * * @param an OutputStream for the facts to be written to. */ - void writeFacts(OutputStream stream) throws IOException; + void writeInferences(OutputStream stream) throws IOException; /** * Exports all the facts of the knowledge base to a desired file. * * @param a String of the file path for the facts to be written to. */ - void writeFacts(String filePath) throws IOException; + void writeInferences(String filePath) throws IOException; /** * Sets the algorithm that will be used for reasoning over the knowledge base. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 581e12a1b..2323b2312 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -814,7 +814,7 @@ void setReasonerState(ReasonerState reasonerState) { } @Override - public void writeFacts(OutputStream stream) throws IOException { + public void writeInferences(OutputStream stream) throws IOException { Set toBeQueriedHeadPredicates = new HashSet(); for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { @@ -825,8 +825,7 @@ public void writeFacts(OutputStream stream) throws IOException { toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); } for (Fact fact : this.knowledgeBase.getFacts()) { - if (!toBeQueriedHeadPredicates.contains(fact.getPredicate())) - stream.write((fact.toString() + "\n").getBytes()); + toBeQueriedHeadPredicates.add(fact.getPredicate()); } for (Predicate predicate : toBeQueriedHeadPredicates) { @@ -838,9 +837,9 @@ public void writeFacts(OutputStream stream) throws IOException { .answerQuery(Expressions.makePositiveLiteral(predicate, tobeGroundedVariables), true)) { answers.forEachRemaining(queryAnswer -> { try { - stream.write(Serializer.getFactOutput(predicate, queryAnswer.getTerms()).getBytes()); + stream.write(Serializer.getFactString(predicate, queryAnswer.getTerms()).getBytes()); } catch (IOException e) { - throw new RuntimeException(); + throw new RuntimeException(e); } }); @@ -850,9 +849,9 @@ public void 
writeFacts(OutputStream stream) throws IOException { } @Override - public void writeFacts(String filePath) throws IOException { + public void writeInferences(String filePath) throws IOException { try (OutputStream stream = new FileOutputStream(filePath)) { - writeFacts(stream); + writeInferences(stream); } } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index b1f6c702b..35f34c29c 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -490,7 +490,7 @@ public void testGetFacts() throws ParsingException, IOException { reasoner.reason(); File file = new File("test.txt"); OutputStream stream = new FileOutputStream(file); - reasoner.writeFacts(stream); + reasoner.writeInferences(stream); stream.flush(); BufferedReader input = new BufferedReader(new FileReader(file)); String factString = ""; From 399dda1195ba0fcec657f49be1d4b785c260ee33 Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 7 Feb 2020 13:49:47 +0100 Subject: [PATCH 0478/1003] added changes in unit test --- .../vlog4j/syntax/parser/RuleParserTest.java | 57 ++++++------------- 1 file changed, 17 insertions(+), 40 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 35f34c29c..a10ef80bc 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -24,14 +24,13 @@ import static org.mockito.Mockito.mock; import java.io.BufferedReader; -import java.io.File; -import java.io.FileOutputStream; -import java.io.FileReader; +import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.OutputStream; +import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; +import java.util.List; import org.junit.Test; import org.mockito.ArgumentMatchers; @@ -452,56 +451,34 @@ public void testCustomDatatype() throws ParsingException { } @Test - public void testGetFacts() throws ParsingException, IOException { + public void testWriteInferences() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - final InMemoryDataSource locations = new InMemoryDataSource(2, 3); - locations.addTuple("germany", "europe"); - locations.addTuple("saxony", "germany"); - locations.addTuple("dresden", "saxony"); + final InMemoryDataSource locations = new InMemoryDataSource(2, 1); + locations.addTuple("dresden", "germany"); RuleParser.parseInto(kb, fact.toString() + "."); - final String sharedFacts = "locatedIn(Egypt,Africa). \n" // + final String rules = "locatedIn(Egypt,Africa). \n" // + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // + "city(dresden). \n" // + "country(germany). \n" // + "university(tudresden, germany). \n" // - + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // - + "zipLocation(\"01069\", dresden) . \n"; - final String rules = sharedFacts + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // - + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . \n" // - + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP, ?City) . 
\n" + + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) . \n"; RuleParser.parseInto(kb, rules); kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); - final String facts = sharedFacts + "location(dresden,saxony). \n" // - + "location(germany,europe). \n" // - + "location(saxony,germany). \n" // - + "locatedIn(germany, europe) . \n" // - + "locatedIn(dresden, saxony) . \n" // - + "locatedIn(saxony, germany) . \n" // - + "locatedIn(dresden, germany) . \n" // - + "locatedIn(dresden, europe) . \n" // - + "locatedIn(saxony, europe) . \n" // - + "address(tudresden, \"Mommsenstraße 9\", \"01069\", dresden) . \n" - + "() . \n"; - KnowledgeBase kb2 = new KnowledgeBase(); - KnowledgeBase kb3 = new KnowledgeBase(); - RuleParser.parseInto(kb2, facts); + List inferences = new ArrayList(); try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - File file = new File("test.txt"); - OutputStream stream = new FileOutputStream(file); + ByteArrayOutputStream stream = new ByteArrayOutputStream(); reasoner.writeInferences(stream); stream.flush(); - BufferedReader input = new BufferedReader(new FileReader(file)); - String factString = ""; - while ((factString = input.readLine()) != null) { - if (!factString.contains("_")) - RuleParser.parseInto(kb3, factString); - } - input.close(); - assertEquals(new HashSet(kb2.getFacts()), new HashSet(kb3.getFacts())); - file.delete(); + try (BufferedReader input = new BufferedReader(new StringReader(stream.toString()))) { + String factString = ""; + while ((factString = input.readLine()) != null) { + inferences.add(factString); + } + } + assertEquals(10, inferences.size()); } } From b535a8c26b711c3b8c752c66a37e736e48174664 Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 7 Feb 2020 13:53:14 +0100 Subject: [PATCH 0479/1003] removed unused imports --- .../org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 1 - 1 file changed, 1 deletion(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index a10ef80bc..abca5ffd7 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -29,7 +29,6 @@ import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashSet; import java.util.List; import org.junit.Test; From 424a2850c25449fcaf83ad9a5a841af39fb7ea10 Mon Sep 17 00:00:00 2001 From: alloka Date: Sat, 8 Feb 2020 03:24:22 +0100 Subject: [PATCH 0480/1003] merged code in serializer --- .../core/model/implementation/Serializer.java | 22 +++++++------------ 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 41314fda7..e344d567e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -132,19 +132,7 @@ public static String getString(final Literal literal) { if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); } - 
stringBuilder.append(getIRIString(literal.getPredicate().getName())).append(OPENING_PARENTHESIS); - boolean first = true; - for (final Term term : literal.getArguments()) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - final String string = term.getSyntacticRepresentation(); - stringBuilder.append(string); - } - stringBuilder.append(CLOSING_PARENTHESIS); - return stringBuilder.toString(); + return getTermsString(literal.getArguments(), stringBuilder, literal.getPredicate()); } /** @@ -392,6 +380,11 @@ private static String addAngleBrackets(final String string) { public static String getFactString(Predicate predicate, List terms) { StringBuilder stringBuilder = new StringBuilder(""); + return getTermsString(terms, stringBuilder, predicate) + STATEMENT_SEPARATOR + "\n"; + + } + + public static String getTermsString(List terms, StringBuilder stringBuilder, Predicate predicate) { stringBuilder.append(getIRIString(predicate.getName())).append(OPENING_PARENTHESIS); boolean first = true; for (Term term : terms) { @@ -403,8 +396,9 @@ public static String getFactString(Predicate predicate, List terms) { final String string = term.getSyntacticRepresentation(); stringBuilder.append(string); } - stringBuilder.append(CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); + stringBuilder.append(CLOSING_PARENTHESIS); return stringBuilder.toString(); + } } From 54b68d1945133edca9047f5dfa2a6a3aeb3509fb Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 10 Feb 2020 17:41:15 +0100 Subject: [PATCH 0481/1003] added some changes --- .../core/model/implementation/Serializer.java | 9 +++++---- .../semanticweb/vlog4j/core/reasoner/Reasoner.java | 6 ++++-- .../core/reasoner/implementation/VLogReasoner.java | 13 ++++++++----- .../vlog4j/syntax/parser/RuleParserTest.java | 4 ++-- 4 files changed, 19 insertions(+), 13 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index e344d567e..09b6f7981 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -132,7 +132,8 @@ public static String getString(final Literal literal) { if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); } - return getTermsString(literal.getArguments(), stringBuilder, literal.getPredicate()); + stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); + return stringBuilder.toString(); } /** @@ -379,12 +380,12 @@ private static String addAngleBrackets(final String string) { } public static String getFactString(Predicate predicate, List terms) { - StringBuilder stringBuilder = new StringBuilder(""); - return getTermsString(terms, stringBuilder, predicate) + STATEMENT_SEPARATOR + "\n"; + return getString(predicate, terms) + STATEMENT_SEPARATOR + "\n"; } - public static String getTermsString(List terms, StringBuilder stringBuilder, Predicate predicate) { + public static String getString(Predicate predicate, List terms) { + StringBuilder stringBuilder = new StringBuilder(""); stringBuilder.append(getIRIString(predicate.getName())).append(OPENING_PARENTHESIS); boolean first = true; for (Term term : terms) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 
598fea327..62fb60c8a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -88,14 +88,16 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * Exports all the facts of the knowledge base to an OutputStream. + * Exports all the (explicit and implicit) facts of the knowledge base to an + * OutputStream. * * @param an OutputStream for the facts to be written to. */ void writeInferences(OutputStream stream) throws IOException; /** - * Exports all the facts of the knowledge base to a desired file. + * Exports all the (explicit and implicit) facts of the knowledge base to a + * desired file. * * @param a String of the file path for the facts to be written to. */ diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 2323b2312..0ed188ff0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -22,6 +22,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; @@ -815,6 +816,7 @@ void setReasonerState(ReasonerState reasonerState) { @Override public void writeInferences(OutputStream stream) throws IOException { + QueryResult queryAnswer; Set toBeQueriedHeadPredicates = new HashSet(); for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { @@ -829,19 +831,20 @@ public void writeInferences(OutputStream stream) throws IOException { } for (Predicate predicate : toBeQueriedHeadPredicates) { - List tobeGroundedVariables = new ArrayList(); + List toBeGroundedVariables = new ArrayList(); for (int i = 0; i < predicate.getArity(); i++) { - tobeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); + toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); } try (final QueryResultIterator answers = this - .answerQuery(Expressions.makePositiveLiteral(predicate, tobeGroundedVariables), true)) { - answers.forEachRemaining(queryAnswer -> { + .answerQuery(Expressions.makePositiveLiteral(predicate, toBeGroundedVariables), true)) { + while (answers.hasNext()) { + queryAnswer = answers.next(); try { stream.write(Serializer.getFactString(predicate, queryAnswer.getTerms()).getBytes()); } catch (IOException e) { throw new RuntimeException(e); } - }); + } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index abca5ffd7..61353b99d 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -68,7 +68,7 @@ public class RuleParserTest { private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); private final PositiveLiteral atom3 = 
Expressions.makePositiveLiteral("http://example.org/q", x, y); private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); - private final PositiveLiteral fact = Expressions.makePositiveLiteral("http://example.org/s", c); + private final Fact fact = Expressions.makeFact("http://example.org/s", c); private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); private final Conjunction body1 = Expressions.makeConjunction(atom1, atom2); private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); @@ -454,7 +454,7 @@ public void testWriteInferences() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); final InMemoryDataSource locations = new InMemoryDataSource(2, 1); locations.addTuple("dresden", "germany"); - RuleParser.parseInto(kb, fact.toString() + "."); + kb.addStatement(fact); final String rules = "locatedIn(Egypt,Africa). \n" // + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // + "city(dresden). \n" // From 717156fd5a2caa1047145684836c8da092d4e854 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 10 Feb 2020 19:13:35 +0100 Subject: [PATCH 0482/1003] refactoring: * moved vLog-specific data structures into new VLogKnowledgeBase class * moved abstract method getConfigurationString from DatSource to VLogDataSource --- .../vlog4j/core/model/api/DataSource.java | 8 - .../implementation/InMemoryDataSource.java | 21 +- .../implementation/VLogDataSource.java | 8 + .../implementation/VLogKnowledgeBase.java | 282 ++++++++++++++ .../reasoner/implementation/VLogReasoner.java | 355 ++++-------------- 5 files changed, 378 insertions(+), 296 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java index 504603d71..888d30f77 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java @@ -30,14 +30,6 @@ */ public interface DataSource extends Entity { - /** - * Constructs a String representation of the data source. - * - * @return a String representation of the data source configuration for a - * certain predicate. - */ - public String toConfigString(); - /** * Retrieve the required arity of target predicates for the data source. * diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index 6a290f021..947b78078 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -48,8 +48,10 @@ public class InMemoryDataSource implements DataSource { * efficiency, the actual number of facts should exactly correspond to this * capacity. 
* - * @param arity the number of parameters in a fact from this source - * @param initialCapacity the planned number of facts + * @param arity + * the number of parameters in a fact from this source + * @param initialCapacity + * the planned number of facts */ public InMemoryDataSource(final int arity, final int initialCapacity) { this.capacity = initialCapacity; @@ -61,7 +63,8 @@ public InMemoryDataSource(final int arity, final int initialCapacity) { * Adds a fact to this data source. The number of constant names must agree with * the arity of this data source. * - * @param constantNames the string names of the constants in this fact + * @param constantNames + * the string names of the constants in this fact */ public void addTuple(final String... constantNames) { if (constantNames.length != this.arity) { @@ -97,7 +100,7 @@ public String[][] getData() { public String getSyntacticRepresentation() { final StringBuilder sb = new StringBuilder( "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); - for (int i = 0; i < this.getData().length; i++) { + for (int i = 0; i < getData().length; i++) { for (int j = 0; j < this.data[i].length; j++) { sb.append(this.data[i][j] + " "); } @@ -106,14 +109,4 @@ public String getSyntacticRepresentation() { return sb.toString(); } - /** - * Returns null to indicate that this {@link DataSource} cannot be passed to - * VLog in a configuration string. - */ - - @Override - public String toConfigString() { - return null; - } - } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java index 04024d3d2..0cab0e979 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java @@ -33,4 +33,12 @@ public abstract class VLogDataSource implements DataSource { public static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; public static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; + /** + * Constructs a String representation of the data source. + * + * @return a String representation of the data source configuration for a + * certain predicate. 
+ */ + public abstract String toConfigString(); + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java new file mode 100644 index 000000000..73f4adf18 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -0,0 +1,282 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; + +/** + * Class for organizing a Knowledge Base using vLog-specific data structures. + * + * @author Irina Dragoste + * + */ +public class VLogKnowledgeBase { + + private final Map edbPredicates = new HashMap<>(); + private final Map aliasesForEdbPredicates = new HashMap<>(); + + private final Set aliasedEdbPredicates = new HashSet<>(); + + private final Set idbPredicates = new HashSet<>(); + + private final Map> directEdbFacts = new HashMap<>(); + + private final Set rules = new HashSet<>(); + + /** + * Package-protected constructor, that organizes given {@code knowledgeBase} in + * vLog-specific data structures. 
+ * + * @param knowledgeBase + */ + VLogKnowledgeBase(final KnowledgeBase knowledgeBase) { + final LoadKbVisitor visitor = this.new LoadKbVisitor(); + visitor.clearIndexes(); + for (final Statement statement : knowledgeBase) { + statement.accept(visitor); + } + } + + boolean hasData() { + return !this.edbPredicates.isEmpty() && !this.aliasedEdbPredicates.isEmpty(); + } + + public boolean hasRules() { + return !this.rules.isEmpty(); + } + + Predicate getAlias(final Predicate predicate) { + if (this.edbPredicates.containsKey(predicate)) { + return predicate; + } else { + return this.aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + } + } + + String getVLogDataSourcesConfigurationString() { + final StringBuilder configStringBuilder = new StringBuilder(); + final Formatter formatter = new Formatter(configStringBuilder); + int dataSourceIndex = 0; + + for (final Entry e : this.edbPredicates.entrySet()) { + dataSourceIndex = addDataSourceConfigurationString(e.getValue().getDataSource(), e.getKey(), + dataSourceIndex, formatter); + } + + for (final Entry e : this.aliasesForEdbPredicates.entrySet()) { + dataSourceIndex = addDataSourceConfigurationString(e.getKey().getDataSource(), e.getValue(), + dataSourceIndex, formatter); + } + + formatter.close(); + return configStringBuilder.toString(); + } + + int addDataSourceConfigurationString(final DataSource dataSource, final Predicate predicate, + final int dataSourceIndex, final Formatter formatter) { + int newDataSourceIndex = dataSourceIndex; + + if (dataSource != null) { + if (dataSource instanceof VLogDataSource) { + final VLogDataSource vLogDataSource = (VLogDataSource) dataSource; + final String configString = vLogDataSource.toConfigString(); + if (configString != null) { + formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); + newDataSourceIndex++; + } + } + } + + return newDataSourceIndex; + } + + Map getEdbPredicates() { + return this.edbPredicates; + } + + Map getAliasesForEdbPredicates() { + return this.aliasesForEdbPredicates; + } + + Map> getDirectEdbFacts() { + return this.directEdbFacts; + } + + Set getRules() { + return this.rules; + } + + /** + * + * Local visitor implementation for processing statements upon loading. Internal + * index structures are updated based on the statements that are detected. 
+ * + * @author Markus Kroetzsch + */ + + class LoadKbVisitor implements StatementVisitor { + + public void clearIndexes() { + VLogKnowledgeBase.this.edbPredicates.clear(); + VLogKnowledgeBase.this.idbPredicates.clear(); + VLogKnowledgeBase.this.aliasedEdbPredicates.clear(); + VLogKnowledgeBase.this.aliasesForEdbPredicates.clear(); + VLogKnowledgeBase.this.directEdbFacts.clear(); + VLogKnowledgeBase.this.rules.clear(); + } + + @Override + public Void visit(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); + if (!VLogKnowledgeBase.this.directEdbFacts.containsKey(predicate)) { + final List facts = new ArrayList<>(); + facts.add(fact); + VLogKnowledgeBase.this.directEdbFacts.put(predicate, facts); + } else { + VLogKnowledgeBase.this.directEdbFacts.get(predicate).add(fact); + } + return null; + } + + @Override + public Void visit(final Rule statement) { + VLogKnowledgeBase.this.rules.add(statement); + for (final PositiveLiteral positiveLiteral : statement.getHead()) { + final Predicate predicate = positiveLiteral.getPredicate(); + if (!VLogKnowledgeBase.this.idbPredicates.contains(predicate)) { + if (VLogKnowledgeBase.this.edbPredicates.containsKey(predicate)) { + addEdbAlias(VLogKnowledgeBase.this.edbPredicates.get(predicate)); + VLogKnowledgeBase.this.edbPredicates.remove(predicate); + } + VLogKnowledgeBase.this.idbPredicates.add(predicate); + } + } + return null; + } + + @Override + public Void visit(final DataSourceDeclaration statement) { + registerEdbDeclaration(statement); + return null; + } + + void registerEdbDeclaration(final DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + if (VLogKnowledgeBase.this.idbPredicates.contains(predicate) + || VLogKnowledgeBase.this.aliasedEdbPredicates.contains(predicate)) { + if (!VLogKnowledgeBase.this.aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { + addEdbAlias(dataSourceDeclaration); + } + } else { + final DataSourceDeclaration currentMainDeclaration = VLogKnowledgeBase.this.edbPredicates.get(predicate); + if (currentMainDeclaration == null) { + VLogKnowledgeBase.this.edbPredicates.put(predicate, dataSourceDeclaration); + } else if (!currentMainDeclaration.equals(dataSourceDeclaration)) { + addEdbAlias(currentMainDeclaration); + addEdbAlias(dataSourceDeclaration); + VLogKnowledgeBase.this.edbPredicates.remove(predicate); + } // else: predicate already known to have local facts (only) + } + } + + void addEdbAlias(final DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + Predicate aliasPredicate; + if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { + aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); + } else { + aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), + predicate.getArity()); + } + VLogKnowledgeBase.this.aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); + VLogKnowledgeBase.this.aliasedEdbPredicates.add(predicate); + + final List terms = new ArrayList<>(); + for (int i = 1; i <= predicate.getArity(); i++) { + terms.add(new UniversalVariableImpl("X" + i)); + } + final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); + final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); + final Rule rule = new RuleImpl(new ConjunctionImpl<>(Arrays.asList(head)), + 
new ConjunctionImpl<>(Arrays.asList(body))); + VLogKnowledgeBase.this.rules.add(rule); + } + + } + + /** + * Dummy data source declaration for predicates for which we have explicit local + * facts in the input. + * + * @author Markus Kroetzsch + * + */ + class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { + + final Predicate predicate; + + public LocalFactsDataSourceDeclaration(Predicate predicate) { + this.predicate = predicate; + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public DataSource getDataSource() { + return null; + } + + @Override + public int hashCode() { + return this.predicate.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; + return this.predicate.equals(other.predicate); + } + } + +} \ No newline at end of file diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 260db93e2..9c9acec91 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -2,33 +2,18 @@ import java.io.IOException; import java.text.MessageFormat; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Formatter; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.Correctness; @@ -82,162 +67,9 @@ public class VLogReasoner implements Reasoner { private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); - /** - * Dummy data source declaration for predicates for which we have explicit local - * facts in the input. 
- * - * @author Markus Kroetzsch - * - */ - class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { - - final Predicate predicate; - - public LocalFactsDataSourceDeclaration(Predicate predicate) { - this.predicate = predicate; - } - - @Override - public T accept(StatementVisitor statementVisitor) { - return statementVisitor.visit(this); - } - - @Override - public Predicate getPredicate() { - return this.predicate; - } - - @Override - public DataSource getDataSource() { - return null; - } - - @Override - public int hashCode() { - return predicate.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; - return predicate.equals(other.predicate); - } - - } - - /** - * Local visitor implementation for processing statements upon loading. Internal - * index structures are updated based on the statements that are detected. - * - * @author Markus Kroetzsch - * - */ - class LoadKbVisitor implements StatementVisitor { - - public void clearIndexes() { - edbPredicates.clear(); - idbPredicates.clear(); - aliasedEdbPredicates.clear(); - aliasesForEdbPredicates.clear(); - directEdbFacts.clear(); - rules.clear(); - } - - @Override - public Void visit(Fact statement) { - final Predicate predicate = statement.getPredicate(); - registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); - if (!directEdbFacts.containsKey(predicate)) { - final List facts = new ArrayList(); - facts.add(statement); - directEdbFacts.put(predicate, facts); - } else { - directEdbFacts.get(predicate).add(statement); - } - return null; - } - - @Override - public Void visit(Rule statement) { - rules.add(statement); - for (final PositiveLiteral positiveLiteral : statement.getHead()) { - final Predicate predicate = positiveLiteral.getPredicate(); - if (!idbPredicates.contains(predicate)) { - if (edbPredicates.containsKey(predicate)) { - addEdbAlias(edbPredicates.get(predicate)); - edbPredicates.remove(predicate); - } - idbPredicates.add(predicate); - } - } - return null; - } - - @Override - public Void visit(DataSourceDeclaration statement) { - registerEdbDeclaration(statement); - return null; - } - - void registerEdbDeclaration(DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - if (idbPredicates.contains(predicate) || aliasedEdbPredicates.contains(predicate)) { - if (!aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { - addEdbAlias(dataSourceDeclaration); - } - } else { - final DataSourceDeclaration currentMainDeclaration = edbPredicates.get(predicate); - if (currentMainDeclaration == null) { - edbPredicates.put(predicate, dataSourceDeclaration); - } else if (!(currentMainDeclaration.equals(dataSourceDeclaration))) { - addEdbAlias(currentMainDeclaration); - addEdbAlias(dataSourceDeclaration); - edbPredicates.remove(predicate); - } // else: predicate already known to have local facts (only) - } - } - - void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - Predicate aliasPredicate; - if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { - aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); - } else { - aliasPredicate = new PredicateImpl(predicate.getName() + "##" + 
dataSourceDeclaration.hashCode(), - predicate.getArity()); - } - aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); - aliasedEdbPredicates.add(predicate); - - final List terms = new ArrayList<>(); - for (int i = 1; i <= predicate.getArity(); i++) { - terms.add(new UniversalVariableImpl("X" + i)); - } - final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); - final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); - final Rule rule = new RuleImpl(new ConjunctionImpl(Arrays.asList(head)), - new ConjunctionImpl(Arrays.asList(body))); - rules.add(rule); - } - - } - final KnowledgeBase knowledgeBase; final VLog vLog = new VLog(); - final Map aliasesForEdbPredicates = new HashMap<>(); - final Set idbPredicates = new HashSet<>(); - final Map edbPredicates = new HashMap<>(); - final Set aliasedEdbPredicates = new HashSet<>(); - final Map> directEdbFacts = new HashMap<>(); - final Set rules = new HashSet<>(); - private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; @@ -332,131 +164,108 @@ void load() throws IOException { void loadKnowledgeBase() throws IOException { LOGGER.info("Started loading knowledge base ..."); - final LoadKbVisitor visitor = new LoadKbVisitor(); - visitor.clearIndexes(); - for (final Statement statement : knowledgeBase) { - statement.accept(visitor); - } - if (edbPredicates.isEmpty() && aliasedEdbPredicates.isEmpty()) { - LOGGER.warn("No facts have been provided."); - } + final VLogKnowledgeBase vLogKB = new VLogKnowledgeBase(this.knowledgeBase); - try { - this.vLog.start(getDataSourcesConfigurationString(), false); - } catch (final AlreadyStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); + if (!vLogKB.hasData()) { + LOGGER.warn("No data statements (facts or datasource declarations) have been provided."); } - loadInMemoryDataSources(); - validateDataSourcePredicateArities(); + // 1. vLog is initialized by loading VLog data sources + loadVLogDataSources(vLogKB); + + // 2. in-memory data is loaded + loadInMemoryDataSources(vLogKB); + + validateDataSourcePredicateArities(vLogKB); - loadFacts(); - loadRules(); + loadFacts(vLogKB); + + // 3. rules are loaded + loadRules(vLogKB); this.reasonerState = ReasonerState.KB_LOADED; // if there are no rules, then materialisation state is complete - this.correctness = rules.isEmpty() ? Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; + this.correctness = !vLogKB.hasRules() ? 
Correctness.SOUND_AND_COMPLETE + : Correctness.SOUND_BUT_INCOMPLETE; LOGGER.info("Finished loading knowledge base."); } - String getDataSourcesConfigurationString() { - final StringBuilder configStringBuilder = new StringBuilder(); - final Formatter formatter = new Formatter(configStringBuilder); - int dataSourceIndex = 0; - for (final Predicate predicate : this.edbPredicates.keySet()) { - final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); - dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), predicate, - dataSourceIndex, formatter); + void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { + try { + this.vLog.start(vLogKB.getVLogDataSourcesConfigurationString(), false); + } catch (final AlreadyStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration.", e); } - for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { - final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); - dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), aliasPredicate, - dataSourceIndex, formatter); + } + + void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { + vLogKB.getEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(v.getDataSource(), k)); + + vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(k.getDataSource(), v)); + } + + void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { + if (dataSource instanceof InMemoryDataSource) { + + final InMemoryDataSource inMemoryDataSource = (InMemoryDataSource) dataSource; + try { + load(predicate, inMemoryDataSource); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration!", e); + } } - formatter.close(); - return configStringBuilder.toString(); - } - - int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, int dataSourceIndex, - Formatter formatter) { - if (dataSource != null) { - final String configString = dataSource.toConfigString(); - if (configString != null) { - formatter.format(dataSource.toConfigString(), dataSourceIndex, - ModelToVLogConverter.toVLogPredicate(predicate)); - return dataSourceIndex + 1; + } + + void load(final Predicate predicate, final InMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); + + this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); + + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : inMemoryDataSource.getData()) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); } } - return dataSourceIndex; } /** * Checks if the loaded external data sources do in fact contain data of the * correct arity. 
- * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) + * + * @throws IncompatiblePredicateArityException + * to indicate a problem (non-checked exception) */ - void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { - for (final Predicate predicate : edbPredicates.keySet()) { - validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); - } - for (final DataSourceDeclaration dataSourceDeclaration : aliasesForEdbPredicates.keySet()) { - validateDataSourcePredicateArity(aliasesForEdbPredicates.get(dataSourceDeclaration), - dataSourceDeclaration.getDataSource()); - } - } + void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) + throws IncompatiblePredicateArityException { - void loadInMemoryDataSources() { - for (final Predicate predicate : this.edbPredicates.keySet()) { - final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); - loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), predicate); - } - for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { - final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); - loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), aliasPredicate); - } - } + vLogKB.getEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(k, v.getDataSource())); - void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { - final InMemoryDataSource inMemoryDataSource; - if (dataSource instanceof InMemoryDataSource) { - inMemoryDataSource = (InMemoryDataSource) dataSource; - } else { - return; - } - try { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); - this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); - if (LOGGER.isDebugEnabled()) { - for (final String[] tuple : inMemoryDataSource.getData()) { - LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); - } - } - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); - } + vLogKB.getAliasesForEdbPredicates() + .forEach((k, v) -> validateDataSourcePredicateArity(v, k.getDataSource())); } /** * Checks if the loaded external data for a given source does in fact contain * data of the correct arity for the given predidate. 
- * - * @param predicate the predicate for which data is loaded - * @param dataSource the data source used - * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) + * + * @param predicate + * the predicate for which data is loaded + * @param dataSource + * the data source used + * + * @throws IncompatiblePredicateArityException + * to indicate a problem (non-checked exception) */ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) throws IncompatiblePredicateArityException { - if (dataSource == null) + if (dataSource == null) { return; + } try { final int dataSourcePredicateArity = this.vLog .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); @@ -470,19 +279,16 @@ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource } } - void loadFacts() { - for (final Predicate predicate : directEdbFacts.keySet()) { - Predicate aliasPredicate; - if (edbPredicates.containsKey(predicate)) { - aliasPredicate = predicate; - } else { - aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); - } + void loadFacts(final VLogKnowledgeBase vLogKB) { + final Map> directEdbFacts = vLogKB.getDirectEdbFacts(); + + directEdbFacts.forEach((k, v) -> { try { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); - final String[][] vLogPredicateTuples = ModelToVLogConverter - .toVLogFactTuples(directEdbFacts.get(predicate)); + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(vLogKB.getAlias(k)); + final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(v); + this.vLog.addData(vLogPredicateName, vLogPredicateTuples); + if (LOGGER.isDebugEnabled()) { for (final String[] tuple : vLogPredicateTuples) { LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); @@ -491,11 +297,12 @@ void loadFacts() { } catch (final EDBConfigurationException e) { throw new RuntimeException("Invalid data sources configuration!", e); } - } + + }); } - void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); + void loadRules(final VLogKnowledgeBase vLogKB) { + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(vLogKB.getRules()); final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); try { @@ -707,7 +514,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { try { load(); - } catch (IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 throw new RuntimeException(e); } } From 27cf1fa34e8d1ad7fe83ecbccbbcce4671915b65 Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 13 Feb 2020 13:30:53 +0100 Subject: [PATCH 0483/1003] added some changes --- .../vlog4j/core/reasoner/Reasoner.java | 8 +++++--- .../reasoner/implementation/VLogReasoner.java | 19 +++++++++++++++---- 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 62fb60c8a..d5effec06 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -88,12 +88,14 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * Exports all the (explicit and implicit) facts of the knowledge base to an - * OutputStream. + * @return the correctness of the query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase} Exports all + * the (explicit and implicit) facts of the knowledge base to an + * OutputStream. * * @param an OutputStream for the facts to be written to. */ - void writeInferences(OutputStream stream) throws IOException; + Correctness writeInferences(OutputStream stream) throws IOException; /** * Exports all the (explicit and implicit) facts of the knowledge base to a diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 0ed188ff0..8ac0944d0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -815,9 +815,11 @@ void setReasonerState(ReasonerState reasonerState) { } @Override - public void writeInferences(OutputStream stream) throws IOException { + public Correctness writeInferences(OutputStream stream) throws IOException { QueryResult queryAnswer; Set toBeQueriedHeadPredicates = new HashSet(); + TermQueryResultIterator stringQueryResultIterator; + QueryResultIterator answers; for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { toBeQueriedHeadPredicates.add(literal.getPredicate()); @@ -835,8 +837,11 @@ public void writeInferences(OutputStream stream) throws IOException { for (int i = 0; i < predicate.getArity(); i++) { toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); } - try (final QueryResultIterator answers = this - .answerQuery(Expressions.makePositiveLiteral(predicate, toBeGroundedVariables), true)) { + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter + .toVLogAtom(Expressions.makePositiveLiteral(predicate, toBeGroundedVariables)); + try { + stringQueryResultIterator = this.vLog.query(vLogAtom, true, false); + answers = new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); while (answers.hasNext()) { queryAnswer = answers.next(); try { @@ -845,10 +850,16 @@ public void writeInferences(OutputStream stream) throws IOException { throw new RuntimeException(e); } } - + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + LOGGER.warn("Query uses predicate " + predicate + + " that does not occur in the knowledge base. 
Answer must be empty!"); } } + return this.correctness; + } @Override From e0f7b6079451d1d16dddbc7438d8e5723f8f56fd Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 13 Feb 2020 15:53:11 +0100 Subject: [PATCH 0484/1003] added some changes --- .../vlog4j/core/reasoner/Reasoner.java | 14 ++++++----- .../reasoner/implementation/VLogReasoner.java | 23 +++++++++++-------- 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index d5effec06..9986ddac0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -88,22 +88,24 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * @return the correctness of the query answers, depending on the state of the - * reasoning (materialisation) and its {@link KnowledgeBase} Exports all - * the (explicit and implicit) facts of the knowledge base to an - * OutputStream. + * Exports all the (explicit and implicit) facts of the knowledge base to an + * OutputStream. * * @param an OutputStream for the facts to be written to. + * @return the correctness of the query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. */ Correctness writeInferences(OutputStream stream) throws IOException; /** * Exports all the (explicit and implicit) facts of the knowledge base to a * desired file. - * + * * @param a String of the file path for the facts to be written to. + * @return the correctness of the query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. */ - void writeInferences(String filePath) throws IOException; + Correctness writeInferences(String filePath) throws IOException; /** * Sets the algorithm that will be used for reasoning over the knowledge base. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 8ac0944d0..664710240 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -22,7 +22,6 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; @@ -816,10 +815,11 @@ void setReasonerState(ReasonerState reasonerState) { @Override public Correctness writeInferences(OutputStream stream) throws IOException { - QueryResult queryAnswer; + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } Set toBeQueriedHeadPredicates = new HashSet(); - TermQueryResultIterator stringQueryResultIterator; - QueryResultIterator answers; for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { toBeQueriedHeadPredicates.add(literal.getPredicate()); @@ -840,12 +840,12 @@ public Correctness writeInferences(OutputStream stream) throws IOException { final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter .toVLogAtom(Expressions.makePositiveLiteral(predicate, toBeGroundedVariables)); try { - stringQueryResultIterator = this.vLog.query(vLogAtom, true, false); - answers = new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); + final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false); while (answers.hasNext()) { - queryAnswer = answers.next(); + final karmaresearch.vlog.Term[] vlogTerms = answers.next(); + final List termList = VLogToModelConverter.toTermList(vlogTerms); try { - stream.write(Serializer.getFactString(predicate, queryAnswer.getTerms()).getBytes()); + stream.write(Serializer.getFactString(predicate, termList).getBytes()); } catch (IOException e) { throw new RuntimeException(e); } @@ -855,17 +855,22 @@ public Correctness writeInferences(OutputStream stream) throws IOException { } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + predicate + " that does not occur in the knowledge base. 
Answer must be empty!"); + throw new RuntimeException("Inconsistent knowledge base state.", e1); } } + logWarningOnCorrectness(); return this.correctness; } @Override - public void writeInferences(String filePath) throws IOException { + public Correctness writeInferences(String filePath) throws IOException { try (OutputStream stream = new FileOutputStream(filePath)) { writeInferences(stream); } + logWarningOnCorrectness(); + return this.correctness; } + } From 08def24bc48cedd70d737b9cff23901f69a4b5d7 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 17 Feb 2020 11:38:26 +0100 Subject: [PATCH 0485/1003] rework after review cleaner code --- .../vlog4j/core/reasoner/Reasoner.java | 20 +- .../reasoner/implementation/VLogReasoner.java | 270 +++++++++--------- 2 files changed, 153 insertions(+), 137 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 9986ddac0..dd4fd9b21 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -1,5 +1,6 @@ package org.semanticweb.vlog4j.core.reasoner; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; @@ -88,24 +89,29 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * Exports all the (explicit and implicit) facts of the knowledge base to an - * OutputStream. + * Exports all the (explicit and implicit) facts inferred during reasoning of + * the knowledge base to an OutputStream. * - * @param an OutputStream for the facts to be written to. + * @param an + * OutputStream for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. + * @throws IOException */ Correctness writeInferences(OutputStream stream) throws IOException; /** - * Exports all the (explicit and implicit) facts of the knowledge base to a - * desired file. + * Exports all the (explicit and implicit) facts inferred during reasoning of + * the knowledge base to a desired file. * - * @param a String of the file path for the facts to be written to. + * @param a + * String of the file path for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. + * @throws IOException + * @throws FileNotFoundException */ - Correctness writeInferences(String filePath) throws IOException; + Correctness writeInferences(String filePath) throws FileNotFoundException, IOException; /** * Sets the algorithm that will be used for reasoning over the knowledge base. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 664710240..a55f07e2e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,5 +1,6 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; +import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; @@ -118,19 +119,22 @@ public DataSource getDataSource() { @Override public int hashCode() { - return predicate.hashCode(); + return this.predicate.hashCode(); } @Override public boolean equals(Object obj) { - if (this == obj) + if (this == obj) { return true; - if (obj == null) + } + if (obj == null) { return false; - if (getClass() != obj.getClass()) + } + if (getClass() != obj.getClass()) { return false; + } final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; - return predicate.equals(other.predicate); + return this.predicate.equals(other.predicate); } } @@ -145,39 +149,39 @@ public boolean equals(Object obj) { class LoadKbVisitor implements StatementVisitor { public void clearIndexes() { - edbPredicates.clear(); - idbPredicates.clear(); - aliasedEdbPredicates.clear(); - aliasesForEdbPredicates.clear(); - directEdbFacts.clear(); - rules.clear(); + VLogReasoner.this.edbPredicates.clear(); + VLogReasoner.this.idbPredicates.clear(); + VLogReasoner.this.aliasedEdbPredicates.clear(); + VLogReasoner.this.aliasesForEdbPredicates.clear(); + VLogReasoner.this.directEdbFacts.clear(); + VLogReasoner.this.rules.clear(); } @Override public Void visit(Fact statement) { final Predicate predicate = statement.getPredicate(); registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); - if (!directEdbFacts.containsKey(predicate)) { - final List facts = new ArrayList(); + if (!VLogReasoner.this.directEdbFacts.containsKey(predicate)) { + final List facts = new ArrayList<>(); facts.add(statement); - directEdbFacts.put(predicate, facts); + VLogReasoner.this.directEdbFacts.put(predicate, facts); } else { - directEdbFacts.get(predicate).add(statement); + VLogReasoner.this.directEdbFacts.get(predicate).add(statement); } return null; } @Override public Void visit(Rule statement) { - rules.add(statement); + VLogReasoner.this.rules.add(statement); for (final PositiveLiteral positiveLiteral : statement.getHead()) { final Predicate predicate = positiveLiteral.getPredicate(); - if (!idbPredicates.contains(predicate)) { - if (edbPredicates.containsKey(predicate)) { - addEdbAlias(edbPredicates.get(predicate)); - edbPredicates.remove(predicate); + if (!VLogReasoner.this.idbPredicates.contains(predicate)) { + if (VLogReasoner.this.edbPredicates.containsKey(predicate)) { + addEdbAlias(VLogReasoner.this.edbPredicates.get(predicate)); + VLogReasoner.this.edbPredicates.remove(predicate); } - idbPredicates.add(predicate); + VLogReasoner.this.idbPredicates.add(predicate); } } return null; @@ -191,18 +195,19 @@ public Void visit(DataSourceDeclaration statement) { void registerEdbDeclaration(DataSourceDeclaration dataSourceDeclaration) { final Predicate predicate = dataSourceDeclaration.getPredicate(); - if (idbPredicates.contains(predicate) || aliasedEdbPredicates.contains(predicate)) { - if 
(!aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { + if (VLogReasoner.this.idbPredicates.contains(predicate) + || VLogReasoner.this.aliasedEdbPredicates.contains(predicate)) { + if (!VLogReasoner.this.aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { addEdbAlias(dataSourceDeclaration); } } else { - final DataSourceDeclaration currentMainDeclaration = edbPredicates.get(predicate); + final DataSourceDeclaration currentMainDeclaration = VLogReasoner.this.edbPredicates.get(predicate); if (currentMainDeclaration == null) { - edbPredicates.put(predicate, dataSourceDeclaration); - } else if (!(currentMainDeclaration.equals(dataSourceDeclaration))) { + VLogReasoner.this.edbPredicates.put(predicate, dataSourceDeclaration); + } else if (!currentMainDeclaration.equals(dataSourceDeclaration)) { addEdbAlias(currentMainDeclaration); addEdbAlias(dataSourceDeclaration); - edbPredicates.remove(predicate); + VLogReasoner.this.edbPredicates.remove(predicate); } // else: predicate already known to have local facts (only) } } @@ -216,8 +221,8 @@ void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), predicate.getArity()); } - aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); - aliasedEdbPredicates.add(predicate); + VLogReasoner.this.aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); + VLogReasoner.this.aliasedEdbPredicates.add(predicate); final List terms = new ArrayList<>(); for (int i = 1; i <= predicate.getArity(); i++) { @@ -225,9 +230,9 @@ void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { } final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); - final Rule rule = new RuleImpl(new ConjunctionImpl(Arrays.asList(head)), - new ConjunctionImpl(Arrays.asList(body))); - rules.add(rule); + final Rule rule = new RuleImpl(new ConjunctionImpl<>(Arrays.asList(head)), + new ConjunctionImpl<>(Arrays.asList(body))); + VLogReasoner.this.rules.add(rule); } } @@ -338,11 +343,11 @@ void loadKnowledgeBase() throws IOException { LOGGER.info("Started loading knowledge base ..."); final LoadKbVisitor visitor = new LoadKbVisitor(); visitor.clearIndexes(); - for (final Statement statement : knowledgeBase) { + for (final Statement statement : this.knowledgeBase) { statement.accept(visitor); } - if (edbPredicates.isEmpty() && aliasedEdbPredicates.isEmpty()) { + if (this.edbPredicates.isEmpty() && this.aliasedEdbPredicates.isEmpty()) { LOGGER.warn("No facts have been provided."); } @@ -363,7 +368,7 @@ void loadKnowledgeBase() throws IOException { this.reasonerState = ReasonerState.KB_LOADED; // if there are no rules, then materialisation state is complete - this.correctness = rules.isEmpty() ? Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; + this.correctness = this.rules.isEmpty() ? Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; LOGGER.info("Finished loading knowledge base."); } @@ -403,15 +408,15 @@ int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, * Checks if the loaded external data sources do in fact contain data of the * correct arity. 
* - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) + * @throws IncompatiblePredicateArityException + * to indicate a problem (non-checked exception) */ void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { - for (final Predicate predicate : edbPredicates.keySet()) { - validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); + for (final Predicate predicate : this.edbPredicates.keySet()) { + validateDataSourcePredicateArity(predicate, this.edbPredicates.get(predicate).getDataSource()); } - for (final DataSourceDeclaration dataSourceDeclaration : aliasesForEdbPredicates.keySet()) { - validateDataSourcePredicateArity(aliasesForEdbPredicates.get(dataSourceDeclaration), + for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { + validateDataSourcePredicateArity(this.aliasesForEdbPredicates.get(dataSourceDeclaration), dataSourceDeclaration.getDataSource()); } } @@ -451,16 +456,19 @@ void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { * Checks if the loaded external data for a given source does in fact contain * data of the correct arity for the given predicate. * - * @param predicate the predicate for which data is loaded - * @param dataSource the data source used + * @param predicate + * the predicate for which data is loaded + * @param dataSource + * the data source used * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) + * @throws IncompatiblePredicateArityException + * to indicate a problem (non-checked exception) */ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) throws IncompatiblePredicateArityException { - if (dataSource == null) + if (dataSource == null) { return; + } try { final int dataSourcePredicateArity = this.vLog .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); @@ -475,17 +483,17 @@ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource } void loadFacts() { - for (final Predicate predicate : directEdbFacts.keySet()) { + for (final Predicate predicate : this.directEdbFacts.keySet()) { Predicate aliasPredicate; - if (edbPredicates.containsKey(predicate)) { + if (this.edbPredicates.containsKey(predicate)) { aliasPredicate = predicate; } else { - aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + aliasPredicate = this.aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); } try { final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); final String[][] vLogPredicateTuples = ModelToVLogConverter - .toVLogFactTuples(directEdbFacts.get(predicate)); + .toVLogFactTuples(this.directEdbFacts.get(predicate)); this.vLog.addData(vLogPredicateName, vLogPredicateTuples); if (LOGGER.isDebugEnabled()) { for (final String[] tuple : vLogPredicateTuples) { @@ -499,7 +507,7 @@ void loadFacts() { } void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(this.rules); final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); try { @@ -616,6 +624,8 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St } catch (final NotStartedException e) {
throw new RuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. Answer must be empty!"); throw new IllegalArgumentException(MessageFormat.format( "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); } @@ -624,6 +634,42 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St return this.correctness; } + @Override + public Correctness writeInferences(OutputStream stream) throws IOException { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, + "Obtaining inferences is not allowed before reasoner is loaded!"); + } + final Set toBeQueriedHeadPredicates = getKnolwedgeBasePredicates(); + + for (final Predicate predicate : toBeQueriedHeadPredicates) { + final PositiveLiteral queryAtom = getQueryAtom(predicate); + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); + try (final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false)) { + while (answers.hasNext()) { + final karmaresearch.vlog.Term[] vlogTerms = answers.next(); + final List termList = VLogToModelConverter.toTermList(vlogTerms); + stream.write(Serializer.getFactString(predicate, termList).getBytes()); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + throw new RuntimeException("Inconsistent knowledge base state.", e1); + } + } + + logWarningOnCorrectness(); + return this.correctness; + } + + @Override + public Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { + try (OutputStream stream = new FileOutputStream(filePath)) { + return writeInferences(stream); + } + } + private void logWarningOnCorrectness() { if (this.correctness != Correctness.SOUND_AND_COMPLETE) { LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); @@ -706,25 +752,6 @@ public boolean isMFC() { return checkCyclic.equals(CyclicCheckResult.CYCLIC); } - private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - try { - load(); - } catch (IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 - throw new RuntimeException(e); - } - } - - CyclicCheckResult checkCyclic; - try { - checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); - } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible - } - return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); - } - @Override public CyclicityResult checkForCycles() { final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); @@ -771,6 +798,49 @@ public void onStatementsRemoved(List statementsRemoved) { updateCorrectnessOnStatementsRemoved(); } + Set getKnolwedgeBasePredicates() { + final Set toBeQueriedHeadPredicates = new HashSet<>(); + for (final Rule rule : this.knowledgeBase.getRules()) { + for (final Literal literal : rule.getHead()) { + toBeQueriedHeadPredicates.add(literal.getPredicate()); + } + } + for (final DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) {
toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); + } + for (final Fact fact : this.knowledgeBase.getFacts()) { + toBeQueriedHeadPredicates.add(fact.getPredicate()); + } + return toBeQueriedHeadPredicates; + } + + private PositiveLiteral getQueryAtom(final Predicate predicate) { + final List toBeGroundedVariables = new ArrayList<>(predicate.getArity()); + for (int i = 0; i < predicate.getArity(); i++) { + toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); + } + return Expressions.makePositiveLiteral(predicate, toBeGroundedVariables); + } + + private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + try { + load(); + } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + throw new RuntimeException(e); + } + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); + } catch (final NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); + } + private void updateReasonerToKnowledgeBaseChanged() { if (this.reasonerState.equals(ReasonerState.KB_LOADED) || this.reasonerState.equals(ReasonerState.MATERIALISED)) { @@ -813,64 +883,4 @@ void setReasonerState(ReasonerState reasonerState) { this.reasonerState = reasonerState; } - @Override - public Correctness writeInferences(OutputStream stream) throws IOException { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Set toBeQueriedHeadPredicates = new HashSet(); - for (Rule rule : this.knowledgeBase.getRules()) { - for (Literal literal : rule.getHead()) { - toBeQueriedHeadPredicates.add(literal.getPredicate()); - } - } - for (DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) { - toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); - } - for (Fact fact : this.knowledgeBase.getFacts()) { - toBeQueriedHeadPredicates.add(fact.getPredicate()); - } - - for (Predicate predicate : toBeQueriedHeadPredicates) { - List toBeGroundedVariables = new ArrayList(); - for (int i = 0; i < predicate.getArity(); i++) { - toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); - } - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter - .toVLogAtom(Expressions.makePositiveLiteral(predicate, toBeGroundedVariables)); - try { - final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false); - while (answers.hasNext()) { - final karmaresearch.vlog.Term[] vlogTerms = answers.next(); - final List termList = VLogToModelConverter.toTermList(vlogTerms); - try { - stream.write(Serializer.getFactString(predicate, termList).getBytes()); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + predicate - + " that does not occur in the knowledge base. 
Answer must be empty!"); - throw new RuntimeException("Inconsistent knowledge base state.", e1); - } - - } - logWarningOnCorrectness(); - return this.correctness; - - } - - @Override - public Correctness writeInferences(String filePath) throws IOException { - try (OutputStream stream = new FileOutputStream(filePath)) { - writeInferences(stream); - } - logWarningOnCorrectness(); - return this.correctness; - } - } From 90bee585ddef24c590599e7513e61c3b8999f552 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 17 Feb 2020 16:37:13 +0100 Subject: [PATCH 0486/1003] moved unit test to reasoner --- .../VlogReasonerWriteInferencesTest.java | 101 ++++++++++++++++++ .../vlog4j/syntax/parser/RuleParserTest.java | 42 -------- 2 files changed, 101 insertions(+), 42 deletions(-) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java new file mode 100644 index 000000000..73af579c0 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java @@ -0,0 +1,101 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import static org.junit.Assert.assertEquals; + +import java.io.BufferedReader; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.List; +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public class VlogReasonerWriteInferencesTest { + final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); + final Fact fact = Expressions.makeFact("http://example.org/s", c); + final AbstractConstant dresdenConst = Expressions.makeAbstractConstant("dresden"); + final Predicate locatedInPred = Expressions.makePredicate("LocatedIn", 2); + final Predicate addressPred = Expressions.makePredicate("address", 4); + final Predicate universityPred = Expressions.makePredicate("university", 2); + final UniversalVariable varX = Expressions.makeUniversalVariable("X"); + final UniversalVariable varY = Expressions.makeUniversalVariable("Y"); + final PositiveLiteral pl1 = Expressions.makePositiveLiteral(locatedInPred, varX, varY); + final PositiveLiteral pl2 = Expressions.makePositiveLiteral("location", varX, varY); + final PositiveLiteral pl3 = Expressions.makePositiveLiteral(addressPred, varX, + Expressions.makeExistentialVariable("Y"), Expressions.makeExistentialVariable("Z"), + Expressions.makeExistentialVariable("Q")); + final PositiveLiteral pl4 = Expressions.makePositiveLiteral(locatedInPred, Expressions.makeExistentialVariable("Q"), + Expressions.makeUniversalVariable("F")); + final PositiveLiteral pl5 = Expressions.makePositiveLiteral(universityPred, varX, + Expressions.makeUniversalVariable("F")); + final Conjunction conjunction = Expressions.makePositiveConjunction(pl3, pl4); + final Rule rule1 = Expressions.makeRule(pl1, pl2); + final Rule rule2 = Expressions.makeRule(conjunction, Expressions.makeConjunction(pl5)); + final Fact f1 = Expressions.makeFact(locatedInPred, Expressions.makeAbstractConstant("Egypt"), + Expressions.makeAbstractConstant("Africa")); + final Fact f2 = Expressions.makeFact(addressPred, Expressions.makeAbstractConstant("TSH"), + Expressions.makeAbstractConstant("Pragerstraße13"), Expressions.makeAbstractConstant("01069"), + dresdenConst); + final Fact f3 = Expressions.makeFact("city", dresdenConst); + final Fact f4 = Expressions.makeFact("country", Expressions.makeAbstractConstant("germany")); + final Fact f5 = Expressions.makeFact(universityPred, Expressions.makeAbstractConstant("tudresden"), + Expressions.makeAbstractConstant("germany")); + final InMemoryDataSource locations = new InMemoryDataSource(2, 1); + + @Test + public void testWriteInferences() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(fact); + kb.addStatements(rule1, rule2, f1, f2, f3, f4, f5); + locations.addTuple("dresden", "germany"); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); + List inferences = new ArrayList(); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + reasoner.writeInferences(stream); + stream.flush(); + try (BufferedReader input = new BufferedReader(new StringReader(stream.toString()))) { + String factString = ""; + while ((factString = input.readLine()) != null) { + inferences.add(factString); + } + + } + assertEquals(10, inferences.size()); + } + + } +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 61353b99d..8a75123bd 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -23,13 +23,8 @@ import 
static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; -import java.io.BufferedReader; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import org.junit.Test; import org.mockito.ArgumentMatchers; @@ -44,12 +39,7 @@ import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; @@ -449,36 +439,4 @@ public void testCustomDatatype() throws ParsingException { assertEquals(constant, result); } - @Test - public void testWriteInferences() throws ParsingException, IOException { - KnowledgeBase kb = new KnowledgeBase(); - final InMemoryDataSource locations = new InMemoryDataSource(2, 1); - locations.addTuple("dresden", "germany"); - kb.addStatement(fact); - final String rules = "locatedIn(Egypt,Africa). \n" // - + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // - + "city(dresden). \n" // - + "country(germany). \n" // - + "university(tudresden, germany). \n" // - + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // - + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) . \n"; - RuleParser.parseInto(kb, rules); - kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); - List inferences = new ArrayList(); - try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - ByteArrayOutputStream stream = new ByteArrayOutputStream(); - reasoner.writeInferences(stream); - stream.flush(); - try (BufferedReader input = new BufferedReader(new StringReader(stream.toString()))) { - String factString = ""; - while ((factString = input.readLine()) != null) { - inferences.add(factString); - } - - } - assertEquals(10, inferences.size()); - } - - } } From 0b3f2303fe40ec183f3d313f8c41fb0755c67b0a Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 17 Feb 2020 17:43:53 +0100 Subject: [PATCH 0487/1003] license header VLogKnowledgeBase --- LICENSE.txt | 402 +++++++++--------- .../implementation/VLogKnowledgeBase.java | 22 +- 2 files changed, 222 insertions(+), 202 deletions(-) diff --git a/LICENSE.txt b/LICENSE.txt index 261eeb9e9..29f81d812 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
+ Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java index 73f4adf18..232ecafe9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2020 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.util.ArrayList; import java.util.Arrays; import java.util.Formatter; @@ -279,4 +299,4 @@ public boolean equals(Object obj) { } } -} \ No newline at end of file +} From 43b19b211a4a4f93d51037099dfc0f26b7937319 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 18 Feb 2020 10:07:42 +0100 Subject: [PATCH 0488/1003] fixes #157 * Reasoner#exportQueryAnswersToCsv for an unknown (not in KB) predicate should not throw exception, but log a warning --- .../reasoner/implementation/VLogReasoner.java | 5 +-- .../implementation/VLogReasonerCsvOutput.java | 34 +++++++++---------- 2 files changed, 18 insertions(+), 21 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 482f36b90..989f61ff1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -4,7 +4,6 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; -import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -428,9 +427,7 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St throw new RuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty!"); - throw new IllegalArgumentException(MessageFormat.format( - "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); + + " that does not occur in the knowledge base. 
Answers are therefore empty."); } logWarningOnCorrectness(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java index 85b136306..7f1c62838 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java @@ -21,9 +21,12 @@ */ import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.Arrays; import java.util.List; @@ -37,6 +40,8 @@ public class VLogReasonerCsvOutput { + private final static String nonExistingFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; + @Test public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOException { final String predicate = "p"; @@ -85,9 +90,8 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOExcep } - @Test(expected = IllegalArgumentException.class) + @Test public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() throws IOException { - final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"), Expressions.makeUniversalVariable("?y")); @@ -95,14 +99,13 @@ public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() thro try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; - reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, true); + reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, true); } + assertFalse(Files.exists(Paths.get(nonExistingFilePath))); } - @Test(expected = IllegalArgumentException.class) + @Test public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() throws IOException { - final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"), Expressions.makeUniversalVariable("?y")); @@ -110,15 +113,14 @@ public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() thro try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; - reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, false); + reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, false); } + assertFalse(Files.exists(Paths.get(nonExistingFilePath))); } - @Test(expected = IllegalArgumentException.class) + @Test public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() throws IOException { - final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"), Expressions.makeUniversalVariable("?y")); @@ -128,26 +130,24 @@ public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() throw reasoner.load(); reasoner.reason(); - final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; - reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, true); + reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, true); } + assertFalse(Files.exists(Paths.get(nonExistingFilePath))); } - @Test(expected = 
IllegalArgumentException.class) public void testExportQueryEmptyKnowledgeBaseAfterReasoningExcludeBlanks() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"), Expressions.makeUniversalVariable("?y")); - final KnowledgeBase kb = new KnowledgeBase(); - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.reason(); - final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; - reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, false); + reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, false); } + assertFalse(Files.exists(Paths.get(nonExistingFilePath))); } } From 97d2b5935a1b351983bedc58715c77887bcb46b3 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 24 Feb 2020 15:21:30 +0100 Subject: [PATCH 0489/1003] fix #158 by creating new AbstractConstant --- .../core/model/implementation/Serializer.java | 19 ++++---- .../implementation/VLogToModelConverter.java | 43 ++++++++++------- .../VLogToModelConverterTest.java | 47 ++++++++++++------- 3 files changed, 65 insertions(+), 44 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 09b6f7981..a9f7006f4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -51,6 +51,7 @@ * */ public final class Serializer { + private static final String NEW_LINE = "\n"; public static final String STATEMENT_SEPARATOR = " ."; public static final String COMMA = ", "; public static final String NEGATIVE_IDENTIFIER = "~"; @@ -62,7 +63,7 @@ public final class Serializer { public static final String OPENING_BRACKET = "["; public static final String CLOSING_BRACKET = "]"; public static final String RULE_SEPARATOR = " :- "; - public static final String AT = "@"; + public static final char AT = '@'; public static final String DATA_SOURCE = "@source "; public static final String CSV_FILE_DATA_SOURCE = "load-csv"; public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; @@ -70,9 +71,9 @@ public final class Serializer { public static final String DATA_SOURCE_SEPARATOR = ": "; public static final String COLON = ":"; public static final String DOUBLE_CARET = "^^"; - public static final String LESS_THAN = "<"; - public static final String MORE_THAN = ">"; - public static final String QUOTE = "\""; + public static final char LESS_THAN = '<'; + public static final char MORE_THAN = '>'; + public static final char QUOTE = '"'; public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; public static final String REGEX_INTEGER = "^[-+]?\\d+$"; @@ -367,7 +368,7 @@ public static String getString(final String string) { */ private static String escape(final String string) { return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") - .replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f"); + .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); // don't touch single quotes here since we only construct double-quoted strings } @@ -380,15 +381,15 @@ private static String addAngleBrackets(final String string) { } public static String getFactString(Predicate predicate, List terms) { - 
return getString(predicate, terms) + STATEMENT_SEPARATOR + "\n"; + return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; } public static String getString(Predicate predicate, List terms) { - StringBuilder stringBuilder = new StringBuilder(""); - stringBuilder.append(getIRIString(predicate.getName())).append(OPENING_PARENTHESIS); + final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName())); + stringBuilder.append(OPENING_PARENTHESIS); boolean first = true; - for (Term term : terms) { + for (final Term term : terms) { if (first) { first = false; } else { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java index 27fd1a2d5..3be4fdebf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java @@ -26,10 +26,11 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** * Utility class with static methods for converting from VLog internal model @@ -63,8 +64,8 @@ static QueryResult toQueryResult(karmaresearch.vlog.Term[] vLogQueryResult) { * in given {@code vLogTerms} at the same position. */ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { - List terms = new ArrayList<>(vLogTerms.length); - for (karmaresearch.vlog.Term vLogTerm : vLogTerms) { + final List terms = new ArrayList<>(vLogTerms.length); + for (final karmaresearch.vlog.Term vLogTerm : vLogTerms) { terms.add(toTerm(vLogTerm)); } return terms; @@ -79,7 +80,7 @@ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { * {@code vLogTerm} and of the corresponding type. 
*/ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { - String name = vLogTerm.getName(); + final String name = vLogTerm.getName(); switch (vLogTerm.getTermType()) { case CONSTANT: return toConstant(name); @@ -100,24 +101,32 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { * @return {@link Constant} object */ private static Constant toConstant(String vLogConstantName) { - if (vLogConstantName.charAt(0) == '<' && vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { + final Constant constant; + if (vLogConstantName.charAt(0) == Serializer.LESS_THAN + && vLogConstantName.charAt(vLogConstantName.length() - 1) == Serializer.MORE_THAN) { // strip <> off of IRIs - return new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1)); - } else if (vLogConstantName.charAt(0) == '"') { - if (vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { - int startTypeIdx = vLogConstantName.lastIndexOf('<', vLogConstantName.length() - 2); - String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1); - String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3); - return new DatatypeConstantImpl(lexicalValue, datatype); + constant = new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1)); + } else if (vLogConstantName.charAt(0) == Serializer.QUOTE) { + if (vLogConstantName.charAt(vLogConstantName.length() - 1) == Serializer.MORE_THAN) { + final int startTypeIdx = vLogConstantName.lastIndexOf(Serializer.LESS_THAN, + vLogConstantName.length() - 2); + final String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1); + final String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3); + constant = new DatatypeConstantImpl(lexicalValue, datatype); } else { - int startTypeIdx = vLogConstantName.lastIndexOf('@', vLogConstantName.length() - 2); - String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); - String string = vLogConstantName.substring(1, startTypeIdx - 1); - return new LanguageStringConstantImpl(string, languageTag); + final int startTypeIdx = vLogConstantName.lastIndexOf(Serializer.AT, vLogConstantName.length() - 2); + if (startTypeIdx > -1) { + final String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); + final String string = vLogConstantName.substring(1, startTypeIdx - 1); + constant = new LanguageStringConstantImpl(string, languageTag); + } else { + constant = new AbstractConstantImpl(vLogConstantName); + } } } else { - return new AbstractConstantImpl(vLogConstantName); + constant = new AbstractConstantImpl(vLogConstantName); } + return constant; } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java index 0a80eb198..64dd2469d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java @@ -19,10 +19,11 @@ * limitations under the License. 
* #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; @@ -33,51 +34,61 @@ public class VLogToModelConverterTest { @Test public void testAbstractConstantConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"); - Term vLog4jTerm = new AbstractConstantImpl("c"); - Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"); + final Term vLog4jTerm = new AbstractConstantImpl("c"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } @Test public void testAbstractConstantIriConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, ""); - Term vLog4jTerm = new AbstractConstantImpl("http://example.org/test"); - Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + final Term vLog4jTerm = new AbstractConstantImpl("http://example.org/test"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } @Test public void testDatatypeConstantConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "\"a\"^^"); - Term vLog4jTerm = new DatatypeConstantImpl("a", "http://example.org/test"); - Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + final Term vLog4jTerm = new DatatypeConstantImpl("a", "http://example.org/test"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } @Test public void testLanguageStringConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "\"Test\"@en"); - Term vLog4jTerm = new LanguageStringConstantImpl("Test", "en"); - Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + final Term vLog4jTerm = new LanguageStringConstantImpl("Test", "en"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } @Test public void testNamedNullConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_123"); - Term vLog4jTerm = new NamedNullImpl("_123"); - Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_123"); + final Term vLog4jTerm = new NamedNullImpl("_123"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } @Test(expected = IllegalArgumentException.class) public void testVariableConversion() { - karmaresearch.vlog.Term 
vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "X"); + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "X"); VLogToModelConverter.toTerm(vLogTerm); } + @Test + public void testAbstractConstantContainingQuoteExpression() { + final String constName = "\""; + final Term convertedTerm = VLogToModelConverter + .toTerm(new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, constName)); + assertTrue(convertedTerm.isConstant()); + assertTrue(convertedTerm instanceof AbstractConstant); + assertEquals(constName, convertedTerm.getName()); + } + } From 989dd9ba0cf9afee23078a61eaa82af77fd570cf Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Tue, 25 Feb 2020 10:48:53 +0100 Subject: [PATCH 0490/1003] add Correctness in queryAnswersSize --- .../core/reasoner/QueryAnswersSize.java | 64 ++++++ .../vlog4j/core/reasoner/Reasoner.java | 9 +- .../reasoner/implementation/VLogReasoner.java | 14 +- .../implementation/QueryAnswerSizeTest.java | 206 +++++++++--------- 4 files changed, 183 insertions(+), 110 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java new file mode 100644 index 000000000..465d9bff6 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java @@ -0,0 +1,64 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * + * @author Larry González + * + */ +public class QueryAnswersSize { + + final Correctness correctness; + final long size; + + public QueryAnswersSize(Correctness correctness, int size) { + this.correctness = correctness; + this.size = size; + } + + public QueryAnswersSize(Correctness correctness, long size) { + this.correctness = correctness; + this.size = size; + } + + /** + * Returns the correctness of the query result. + *
              + *
+	 * <ul>
+	 * <li>If {@link Correctness#SOUND_AND_COMPLETE}, the query results are
+	 * guaranteed to be correct.</li>
+	 * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+	 * to be sound, but may be incomplete.</li>
+	 * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+	 * results may be unsound.</li>
+	 * </ul>
            + * + * @return query result correctness + */ + public Correctness getCorrectness() { + return this.correctness; + } + + public long getSize() { + return this.size; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index cc5b62fe3..569ec414a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -337,14 +337,13 @@ static Reasoner getInstance() { * @return queryAnswerSize(query, true), the number of facts in the extension of * the query. */ - long queryAnswerSize(PositiveLiteral query); + QueryAnswersSize queryAnswerSize(PositiveLiteral query); // TODO add examples to query javadoc /** * Evaluates an atomic ({@code query}), and returns the number of implicit facts - * loaded into the reasoner and the number of explicit facts materialised by - * the reasoner. - *
            + * loaded into the reasoner and the number of explicit facts materialised by the + * reasoner.
            * An answer to the query is the terms a fact that matches the {@code query}: * the fact predicate is the same as the {@code query} predicate, the * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer @@ -390,7 +389,7 @@ static Reasoner getInstance() { * named individuals). * @return number of facts in the extension of the query. */ - long queryAnswerSize(PositiveLiteral query, boolean includeNulls); + QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 383dbfe66..16c028194 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswersSize; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -595,19 +596,22 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public long queryAnswerSize(PositiveLiteral query) { + public QueryAnswersSize queryAnswerSize(PositiveLiteral query) { return queryAnswerSize(query, true); } @Override - public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { + public QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } Validate.notNull(query, "Query atom must not be null!"); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - long result = -1; + long result; try { result = this.vLog.querySize(vLogAtom, true, filterBlanks); } catch (NotStartedException e) { @@ -615,9 +619,9 @@ public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { } catch (NonExistingPredicateException e) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. 
Answer must be empty!"); - return 0; + result = 0; } - return result; + return new QueryAnswersSize(this.correctness, result); } @Override diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index b320f1e34..ec9639244 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -66,6 +66,8 @@ public class QueryAnswerSizeTest { private static final Fact factPc = Expressions.makeFact(predP, c); private static final Fact factPd = Expressions.makeFact(predP, d); + private static final Fact factQc = Expressions.makeFact(predQ, c); + private static final Fact factQd = Expressions.makeFact(predQ, d); private static final Fact factQe = Expressions.makeFact(predQ, e); private static final Fact factQf = Expressions.makeFact(predQ, f); @@ -79,9 +81,9 @@ public void noFactsnoRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -91,9 +93,9 @@ public void noFactsUniversalRule() throws IOException { kb.addStatement(QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -103,9 +105,9 @@ public void noFactsExistentialRule() throws IOException { kb.addStatement(RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -115,9 +117,9 @@ public void pFactsNoRules() throws IOException { kb.addStatements(factPc, factPd); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -127,12 +129,16 @@ public void pFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - 
assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPc, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPd, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQc, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQd, false).getSize()); } } @@ -142,15 +148,15 @@ public void pFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -160,15 +166,15 @@ public void qFactsUniversalRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px)); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -178,15 +184,15 @@ public void qFactsExistentialRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner 
reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px)); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -196,15 +202,15 @@ public void pFactsQFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(4, reasoner.queryAnswerSize(Qx)); - assertEquals(4, reasoner.queryAnswerSize(Qx, true)); - assertEquals(4, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -214,25 +220,25 @@ public void pFactsQFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(4, reasoner.queryAnswerSize(Qx)); - assertEquals(4, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rey, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); - assertEquals(0, reasoner.queryAnswerSize(Rey, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + 
assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); } } @@ -242,25 +248,25 @@ public void pFactsQFactsExistentialAndUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(6, reasoner.queryAnswerSize(Qx)); - assertEquals(6, reasoner.queryAnswerSize(Qx, true)); - assertEquals(4, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rey, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); - assertEquals(0, reasoner.queryAnswerSize(Rey, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(6, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(6, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); } } @@ -270,17 +276,17 @@ public void pFactsLiteralWithSameVariables() throws IOException { kb.addStatements(factPc, factPd, RxxRxyRyyPx); try (VLogReasoner 
reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Rxx, true)); - assertEquals(2, reasoner.queryAnswerSize(Rxx, false)); + assertEquals(4, reasoner.queryAnswerSize(Rxx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxx, false).getSize()); - assertEquals(6, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(6, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Ryy, true)); - assertEquals(2, reasoner.queryAnswerSize(Ryy, false)); + assertEquals(4, reasoner.queryAnswerSize(Ryy, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Ryy, false).getSize()); } } From d16d8afd92a1772399b35d99ff4f0de509084138 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Tue, 25 Feb 2020 10:48:53 +0100 Subject: [PATCH 0491/1003] add Correctness in queryAnswersSize; add test for facts --- .../core/reasoner/QueryAnswersSize.java | 64 ++++++ .../vlog4j/core/reasoner/Reasoner.java | 9 +- .../reasoner/implementation/VLogReasoner.java | 14 +- .../implementation/QueryAnswerSizeTest.java | 206 +++++++++--------- 4 files changed, 183 insertions(+), 110 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java new file mode 100644 index 000000000..465d9bff6 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java @@ -0,0 +1,64 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * + * @author Larry González + * + */ +public class QueryAnswersSize { + + final Correctness correctness; + final long size; + + public QueryAnswersSize(Correctness correctness, int size) { + this.correctness = correctness; + this.size = size; + } + + public QueryAnswersSize(Correctness correctness, long size) { + this.correctness = correctness; + this.size = size; + } + + /** + * Returns the correctness of the query result. + *
              + *
+	 * <ul>
+	 * <li>If {@link Correctness#SOUND_AND_COMPLETE}, the query results are
+	 * guaranteed to be correct.</li>
+	 * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+	 * to be sound, but may be incomplete.</li>
+	 * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+	 * results may be unsound.</li>
+	 * </ul>
            + * + * @return query result correctness + */ + public Correctness getCorrectness() { + return this.correctness; + } + + public long getSize() { + return this.size; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index cc5b62fe3..569ec414a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -337,14 +337,13 @@ static Reasoner getInstance() { * @return queryAnswerSize(query, true), the number of facts in the extension of * the query. */ - long queryAnswerSize(PositiveLiteral query); + QueryAnswersSize queryAnswerSize(PositiveLiteral query); // TODO add examples to query javadoc /** * Evaluates an atomic ({@code query}), and returns the number of implicit facts - * loaded into the reasoner and the number of explicit facts materialised by - * the reasoner. - *
            + * loaded into the reasoner and the number of explicit facts materialised by the + * reasoner.
            * An answer to the query is the terms a fact that matches the {@code query}: * the fact predicate is the same as the {@code query} predicate, the * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer @@ -390,7 +389,7 @@ static Reasoner getInstance() { * named individuals). * @return number of facts in the extension of the query. */ - long queryAnswerSize(PositiveLiteral query, boolean includeNulls); + QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 383dbfe66..16c028194 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswersSize; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -595,19 +596,22 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public long queryAnswerSize(PositiveLiteral query) { + public QueryAnswersSize queryAnswerSize(PositiveLiteral query) { return queryAnswerSize(query, true); } @Override - public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { + public QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } Validate.notNull(query, "Query atom must not be null!"); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - long result = -1; + long result; try { result = this.vLog.querySize(vLogAtom, true, filterBlanks); } catch (NotStartedException e) { @@ -615,9 +619,9 @@ public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { } catch (NonExistingPredicateException e) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. 
Answer must be empty!"); - return 0; + result = 0; } - return result; + return new QueryAnswersSize(this.correctness, result); } @Override diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index b320f1e34..ec9639244 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -66,6 +66,8 @@ public class QueryAnswerSizeTest { private static final Fact factPc = Expressions.makeFact(predP, c); private static final Fact factPd = Expressions.makeFact(predP, d); + private static final Fact factQc = Expressions.makeFact(predQ, c); + private static final Fact factQd = Expressions.makeFact(predQ, d); private static final Fact factQe = Expressions.makeFact(predQ, e); private static final Fact factQf = Expressions.makeFact(predQ, f); @@ -79,9 +81,9 @@ public void noFactsnoRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -91,9 +93,9 @@ public void noFactsUniversalRule() throws IOException { kb.addStatement(QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -103,9 +105,9 @@ public void noFactsExistentialRule() throws IOException { kb.addStatement(RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -115,9 +117,9 @@ public void pFactsNoRules() throws IOException { kb.addStatements(factPc, factPd); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -127,12 +129,16 @@ public void pFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - 
assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPc, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPd, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQc, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQd, false).getSize()); } } @@ -142,15 +148,15 @@ public void pFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -160,15 +166,15 @@ public void qFactsUniversalRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px)); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -178,15 +184,15 @@ public void qFactsExistentialRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner 
reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px)); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -196,15 +202,15 @@ public void pFactsQFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(4, reasoner.queryAnswerSize(Qx)); - assertEquals(4, reasoner.queryAnswerSize(Qx, true)); - assertEquals(4, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -214,25 +220,25 @@ public void pFactsQFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(4, reasoner.queryAnswerSize(Qx)); - assertEquals(4, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rey, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); - assertEquals(0, reasoner.queryAnswerSize(Rey, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + 
assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); } } @@ -242,25 +248,25 @@ public void pFactsQFactsExistentialAndUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(6, reasoner.queryAnswerSize(Qx)); - assertEquals(6, reasoner.queryAnswerSize(Qx, true)); - assertEquals(4, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rey, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); - assertEquals(0, reasoner.queryAnswerSize(Rey, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(6, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(6, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); } } @@ -270,17 +276,17 @@ public void pFactsLiteralWithSameVariables() throws IOException { kb.addStatements(factPc, factPd, RxxRxyRyyPx); try (VLogReasoner 
reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Rxx, true)); - assertEquals(2, reasoner.queryAnswerSize(Rxx, false)); + assertEquals(4, reasoner.queryAnswerSize(Rxx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxx, false).getSize()); - assertEquals(6, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(6, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Ryy, true)); - assertEquals(2, reasoner.queryAnswerSize(Ryy, false)); + assertEquals(4, reasoner.queryAnswerSize(Ryy, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Ryy, false).getSize()); } } From 37cf305f139a5d15307a8aec1c75f033e6c16b8a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 08:58:57 +0100 Subject: [PATCH 0492/1003] call reasoner.queryAnswerSize.getSize() instead of reasoner.queryAnswerSize --- .../vlog4j/client/picocli/VLog4jClientMaterialize.java | 2 +- .../semanticweb/vlog4j/examples/CompareWikidataDBpedia.java | 6 +++--- .../org/semanticweb/vlog4j/examples/CountingTriangles.java | 6 +++--- .../java/org/semanticweb/vlog4j/examples/DoidExample.java | 2 +- .../vlog4j/examples/InMemoryGraphAnalysisExample.java | 6 ++++-- .../examples/core/SkolemVsRestrictedChaseTermination.java | 2 +- .../semanticweb/vlog4j/examples/graal/DoidExampleGraal.java | 4 ++-- 7 files changed, 15 insertions(+), 13 deletions(-) diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java index 99b483243..b5ac9ff08 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -230,7 +230,7 @@ private void doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral q } private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { - System.out.println("Number of query answers in " + query + ": " + reasoner.queryAnswerSize(query)); + System.out.println("Number of query answers in " + query + ": " + reasoner.queryAnswerSize(query).getSize()); } private String queryOputputPath(final PositiveLiteral query) { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index ce4bd92c7..cbf837284 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -88,9 +88,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double resultCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("result(?X)")); - final double wdCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inWd(?X)")); - final double dbpCount = 
reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inDbp(?X)")); + final double resultCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("result(?X)")).getSize(); + final double wdCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inWd(?X)")).getSize(); + final double dbpCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inDbp(?X)")).getSize(); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 63e1cb98e..79c66c1e6 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -61,9 +61,9 @@ public static void main(final String[] args) throws IOException, ParsingExceptio /* Initialise reasoner and compute inferences */ reasoner.reason(); - final double countries = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("country(?X)")); - final double shareBorder = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")); - final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")); + final double countries = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("country(?X)")).getSize(); + final double shareBorder = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")).getSize(); + final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")).getSize(); System.out.print("Found " + countries + " countries in Wikidata"); // Due to symmetry, each joint border is found twice, hence we divide by 2: diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 96f573d87..c947d534e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -72,7 +72,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { - double querySize = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral(queryString)); + double querySize = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral(queryString)).getSize(); System.out.println(" " + queryString + ": " + querySize); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index 87bc9927e..23d64cf1c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -87,8 +87,10 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double unreachable = 
reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("unreachable(?X)")); - final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")); + final double unreachable = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("unreachable(?X)")) + .getSize(); + final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) + .getSize(); System.out .println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + unreachable); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 138c58d4b..5be95aebf 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -102,7 +102,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio * restrictions. */ System.out.println("Before the timeout, the Skolem chase had produced " - + reasoner.queryAnswerSize(queryHasPart) + " results for hasPart(?X, ?Y)."); + + reasoner.queryAnswerSize(queryHasPart).getSize() + " results for hasPart(?X, ?Y)."); /* * 6. We reset the reasoner to discard all inferences, and apply the Restricted diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index e53b2845a..98bb8b7ac 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -131,10 +131,10 @@ public static void main(final String[] args) throws IOException { final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); System.out.println("Humans in Wikidata who died in 2018 due to cancer: " - + reasoner.queryAnswerSize(humansWhoDiedOfCancer)); + + reasoner.queryAnswerSize(humansWhoDiedOfCancer).getSize()); System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " - + reasoner.queryAnswerSize(humansWhoDiedOfNoncancer)); + + reasoner.queryAnswerSize(humansWhoDiedOfNoncancer).getSize()); System.out.println("Done."); } From 94178c76d8c210ec505b20723a8a5961e01d262e Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 09:24:43 +0100 Subject: [PATCH 0493/1003] call logWarningOnCorrectness in queryAnswerSize --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 1 + 1 file changed, 1 insertion(+) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 16c028194..22bb07ce7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -621,6 +621,7 @@ public QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNu + " that does not occur in the knowledge base. 
Answer must be empty!"); result = 0; } + logWarningOnCorrectness(); return new QueryAnswersSize(this.correctness, result); } From 05bcbe552fc606d2f0557b1c4758595b10a17bf0 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 09:25:11 +0100 Subject: [PATCH 0494/1003] add and update javadoc --- .../core/reasoner/QueryAnswersSize.java | 41 +++++++++++++++++-- .../vlog4j/core/reasoner/Reasoner.java | 3 +- .../vlog4j/rdf/RdfModelConverter.java | 4 +- 3 files changed, 41 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java index 465d9bff6..238145584 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java @@ -21,6 +21,31 @@ */ /** + * Container for correctness and size of a query. + * + * Depending on the state of the reasoning (materialisation) and its + * {@link KnowledgeBase}, the answers can have a different {@link Correctness} + *
<ul>
+ * <li>If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current
+ * knowledge base has completed, and the query answers are guaranteed to be
+ * correct.</li>
+ * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+ * to be sound, but may be incomplete. This can happen
+ * <ul>
+ * <li>when materialisation has not completed ({@link Reasoner#reason()} returns
+ * {@code false}),</li>
+ * <li>or when the knowledge base was modified after reasoning, and the
+ * materialisation does not reflect the current knowledge base.
+ * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain
+ * complete query answers with respect to the current knowledge base.</li>
+ * </ul>
+ * </li>
+ * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+ * results may be unsound. This can happen when the knowledge base was modified
+ * and the reasoner materialisation is no longer consistent with the current
+ * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required,
+ * in order to obtain correct query answers.</li>
+ * </ul>
            * * @author Larry González * @@ -30,10 +55,14 @@ public class QueryAnswersSize { final Correctness correctness; final long size; - public QueryAnswersSize(Correctness correctness, int size) { - this.correctness = correctness; - this.size = size; - } + /** + * Constructor of QueryAnswerSize + * + * @param correctness of the evaluated query. See {@link Correctness}. + * + * @param size of the evaluated query, i.e. number of facts in the + * extension of the query. + */ public QueryAnswersSize(Correctness correctness, long size) { this.correctness = correctness; @@ -57,6 +86,10 @@ public Correctness getCorrectness() { return this.correctness; } + /** + * + * @return query result correctness + */ public long getSize() { return this.size; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 569ec414a..d57191648 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -387,7 +387,8 @@ static Reasoner getInstance() { * answers will only contain the {@link QueryResult}s with * terms of type {@link TermType#CONSTANT} (representing * named individuals). - * @return number of facts in the extension of the query. + * @return QueryAnswersSize that contains the Correctness and the number of + * facts in the extension of the query. */ QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls); diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java index d2f7f972b..0c58bb826 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java +++ b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java @@ -52,8 +52,8 @@ * String as name. *
<li>{@link Literal}s are converted to {@link Constant}s with names containing
 * the canonical form of the literal label, the data type and the language.</li>
- * <li>{@link BNode}s are converted to {@link NamedNull}s with the generated blank
- * ID as name. {@link BNode}s have unique generated IDs in the context a
+ * <li>{@link BNode}s are converted to {@link NamedNull}s with the generated
+ * blank ID as name. {@link BNode}s have unique generated IDs in the context a
 * {@link Model}s. Blanks with the same name loaded from different models will
 * have different ids.</li>
 * </ul>
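The patches above switch every call site from a plain long count to the QueryAnswersSize container returned by Reasoner#queryAnswerSize and document its correctness semantics. The following minimal usage sketch is not part of the patch series: the facts and the rule are invented for illustration, and it assumes the 0.4.0-SNAPSHOT API exactly as shown in the diffs (vlog4j-core plus the RuleParser from the vlog4j-parser module).

// Illustrative sketch only, not a patch: shows the call pattern introduced above,
// where the count comes wrapped together with the correctness of the
// materialisation it was computed from.
import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.QueryAnswersSize;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;
import org.semanticweb.vlog4j.parser.RuleParser;

public class QueryAnswersSizeUsageSketch {
	public static void main(final String[] args) throws Exception {
		final KnowledgeBase kb = new KnowledgeBase();
		// Example data, made up for this sketch:
		kb.addStatement(RuleParser.parseFact("edge(a, b) ."));
		kb.addStatement(RuleParser.parseFact("edge(b, c) ."));
		kb.addStatement(RuleParser.parseRule("reachable(?X, ?Y) :- edge(?X, ?Y) ."));

		try (final Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.reason();
			final PositiveLiteral query = RuleParser.parsePositiveLiteral("reachable(?X, ?Y)");
			// The result is no longer a plain long but a container object:
			final QueryAnswersSize answers = reasoner.queryAnswerSize(query);
			System.out.println(query + ": " + answers.getSize() + " answers, " + answers.getCorrectness());
		}
	}
}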
          From d18909f9d382b66a0eea1bc73d2365b783b60a09 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 09:28:42 +0100 Subject: [PATCH 0495/1003] add test: use a fact as a query --- .../core/reasoner/implementation/QueryAnswerSizeTest.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index ec9639244..818807ed1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -135,10 +135,15 @@ public void pFactsUniversalRule() throws IOException { assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPc, true).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPd, true).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQc, true).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQd, true).getSize()); assertEquals(1, reasoner.queryAnswerSize(factPc, false).getSize()); assertEquals(1, reasoner.queryAnswerSize(factPd, false).getSize()); assertEquals(1, reasoner.queryAnswerSize(factQc, false).getSize()); assertEquals(1, reasoner.queryAnswerSize(factQd, false).getSize()); + } } From 04629ada4589d74a638aa044d02f07cc97172658 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 14:19:17 +0100 Subject: [PATCH 0496/1003] add interface QueryAnswerSize and implementation QueryAnswerSizeImpl --- .../vlog4j/core/reasoner/QueryAnswerSize.java | 78 +++++++++++++++++++ .../QueryAnswerSizeImpl.java} | 0 2 files changed, 78 insertions(+) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/{QueryAnswersSize.java => implementation/QueryAnswerSizeImpl.java} (100%) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java new file mode 100644 index 000000000..d4522d875 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java @@ -0,0 +1,78 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Container for correctness and number of query answers, i.e. the number of + * facts that the query maps to. 
+ * + * Depending on the state of the reasoning (materialisation) and its + * {@link KnowledgeBase}, the answers can have a different {@link Correctness} + *
<ul>
+ * <li>If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current
+ * knowledge base has completed, and the query answers are guaranteed to be
+ * correct.</li>
+ * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+ * to be sound, but may be incomplete. This can happen
+ * <ul>
+ * <li>when materialisation has not completed ({@link Reasoner#reason()} returns
+ * {@code false}),</li>
+ * <li>or when the knowledge base was modified after reasoning, and the
+ * materialisation does not reflect the current knowledge base.
+ * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain
+ * complete query answers with respect to the current knowledge base.</li>
+ * </ul>
+ * </li>
+ * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+ * results may be unsound. This can happen when the knowledge base was modified
+ * and the reasoner materialisation is no longer consistent with the current
+ * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required,
+ * in order to obtain correct query answers.</li>
+ * </ul>
          + * + * @author Larry González + * + */ +public interface QueryAnswerSize { + + /** + * Returns the correctness of the query result. + *
<ul>
+ * <li>If {@link Correctness#SOUND_AND_COMPLETE}, the query results are
+ * guaranteed to be correct.</li>
+ * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+ * to be sound, but may be incomplete.</li>
+ * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+ * results may be unsound.
+ * </ul>
          + * + * @return query result correctness + */ + Correctness getCorrectness(); + + /** + * + * @return number of query answers, i.e., the number of facts that the query + * maps to. + */ + long getSize(); + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java similarity index 100% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java From fad6ab6b5a8ddc1f8d24157a328e76721abc046e Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 14:21:50 +0100 Subject: [PATCH 0497/1003] add interface QueryAnswerSize and implementation QueryAnswerSizeImpl --- .../implementation/QueryAnswerSizeImpl.java | 69 +++++-------------- 1 file changed, 16 insertions(+), 53 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java index 238145584..504446f92 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java @@ -1,4 +1,7 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import org.semanticweb.vlog4j.core.reasoner.Correctness; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerSize; /*- * #%L @@ -20,78 +23,38 @@ * #L% */ -/** - * Container for correctness and size of a query. - * - * Depending on the state of the reasoning (materialisation) and its - * {@link KnowledgeBase}, the answers can have a different {@link Correctness} - *
<ul>
- * <li>If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current
- * knowledge base has completed, and the query answers are guaranteed to be
- * correct.</li>
- * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
- * to be sound, but may be incomplete. This can happen
- * <ul>
- * <li>when materialisation has not completed ({@link Reasoner#reason()} returns
- * {@code false}),</li>
- * <li>or when the knowledge base was modified after reasoning, and the
- * materialisation does not reflect the current knowledge base.
- * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain
- * complete query answers with respect to the current knowledge base.</li>
- * </ul>
- * </li>
- * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
- * results may be unsound. This can happen when the knowledge base was modified
- * and the reasoner materialisation is no longer consistent with the current
- * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required,
- * in order to obtain correct query answers.</li>
- * </ul>
          - * - * @author Larry González - * - */ -public class QueryAnswersSize { +public class QueryAnswerSizeImpl implements QueryAnswerSize { - final Correctness correctness; - final long size; + final private Correctness correctness; + final private long size; /** * Constructor of QueryAnswerSize * * @param correctness of the evaluated query. See {@link Correctness}. * - * @param size of the evaluated query, i.e. number of facts in the + * @param size number of query answers, i.e. number of facts in the * extension of the query. */ - public QueryAnswersSize(Correctness correctness, long size) { + QueryAnswerSizeImpl(Correctness correctness, long size) { this.correctness = correctness; this.size = size; } - /** - * Returns the correctness of the query result. - *
<ul>
- * <li>If {@link Correctness#SOUND_AND_COMPLETE}, the query results are
- * guaranteed to be correct.</li>
- * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
- * to be sound, but may be incomplete.</li>
- * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
- * results may be unsound.
- * </ul>
          - * - * @return query result correctness - */ + @Override public Correctness getCorrectness() { return this.correctness; } - /** - * - * @return query result correctness - */ + @Override public long getSize() { return this.size; } + @Override + public String toString() { + return this.size + " (" + this.correctness.toString() + ")"; + } + } From 6a57bce078687d63da853a2464e690dada32e8e8 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 14:22:56 +0100 Subject: [PATCH 0498/1003] fix javadoc; rename class --- .../vlog4j/core/reasoner/Reasoner.java | 17 ++++++++--------- .../reasoner/implementation/VLogReasoner.java | 8 ++++---- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index d57191648..df794ea28 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -11,6 +11,7 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryAnswerSizeImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; /* @@ -337,7 +338,7 @@ static Reasoner getInstance() { * @return queryAnswerSize(query, true), the number of facts in the extension of * the query. */ - QueryAnswersSize queryAnswerSize(PositiveLiteral query); + QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query); // TODO add examples to query javadoc /** @@ -380,17 +381,15 @@ static Reasoner getInstance() { * * @param query a {@link PositiveLiteral} representing the query to be * answered. - * @param includeNulls if {@code true}, {@link QueryResult}s containing terms of - * type {@link TermType#NAMED_NULL} (representing anonymous - * individuals introduced to satisfy rule existentially - * quantified variables) will be included. Otherwise, the - * answers will only contain the {@link QueryResult}s with - * terms of type {@link TermType#CONSTANT} (representing - * named individuals). + * @param includeNulls if {@code true}, facts with {@link TermType#NAMED_NULL} + * terms will be included in the {@link QueryAnswerSizeImpl}. + * Otherwise, facts with {@link TermType#NAMED_NULL} terms + * will be ignored. + * * @return QueryAnswersSize that contains the Correctness and the number of * facts in the extension of the query. 
*/ - QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls); + QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 22bb07ce7..c19c3caff 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -35,7 +35,7 @@ import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswersSize; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerSize; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -596,12 +596,12 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public QueryAnswersSize queryAnswerSize(PositiveLiteral query) { + public QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query) { return queryAnswerSize(query, true); } @Override - public QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls) { + public QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); @@ -622,7 +622,7 @@ public QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNu result = 0; } logWarningOnCorrectness(); - return new QueryAnswersSize(this.correctness, result); + return new QueryAnswerSizeImpl(this.correctness, result); } @Override From b7b6d91b84c9447b9c3c9d3df40c35c5cf7f4ac0 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 14:34:33 +0100 Subject: [PATCH 0499/1003] test with unreleased vlog --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 908b769d3..e363e83b3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,9 +31,9 @@ jobs: - dist: trusty -## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -# before_install: -# - sh ./build-vlog-library.sh +# Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar + before_install: + - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From f222fa9ac56ea03cf98e67170bf39a8afafbaa6b Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 14:55:54 +0100 Subject: [PATCH 0500/1003] go back with released version of vlog --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index e363e83b3..908b769d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,9 +31,9 @@ jobs: - dist: trusty -# Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar - before_install: - - sh ./build-vlog-library.sh +## Uncomment section below and the packages above to build unreleased 
snapshots of karmaresearch/vlog into vlog4j-base jar +# before_install: +# - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 663dda4778ca3ff6fcd1bfe2eee57de9746e6c33 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Feb 2020 17:52:03 +0100 Subject: [PATCH 0501/1003] force travis to rebuild vLog --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 908b769d3..2c2ffc4af 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,7 +33,7 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar # before_install: -# - sh ./build-vlog-library.sh + - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 6a508757a6f97821789001e812c9503e76a3788e Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Feb 2020 17:58:13 +0100 Subject: [PATCH 0502/1003] fix travis.yml --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2c2ffc4af..7c38cbb34 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,8 +32,7 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -# before_install: - - sh ./build-vlog-library.sh +before_install: - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 76cca028b4d6f935c40209df425a40f987e7308a Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Feb 2020 18:07:12 +0100 Subject: [PATCH 0503/1003] comment back VLog build code in travis script --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 7c38cbb34..908b769d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,7 +32,8 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -before_install: - sh ./build-vlog-library.sh +# before_install: +# - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 608657a54f811d6f2556cbd28c088868f341e351 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Feb 2020 18:08:37 +0100 Subject: [PATCH 0504/1003] try make travis build vLog --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 908b769d3..719168552 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,8 +32,8 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -# before_install: -# - sh ./build-vlog-library.sh +before_install: + - sh: ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 9025d0fe700c4679a58a4baafba59b1f9df40b1d Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Feb 2020 18:16:51 +0100 Subject: [PATCH 0505/1003] try again to fix travis script so build-vlog-library.sh gets executed --- .travis.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 719168552..45354e497 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,8 +32,11 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar +# before_install: +# - sh ./build-vlog-library.sh before_install: - - sh: ./build-vlog-library.sh + script: + - ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From da5ace2a74ec71d1b1b9f8231e57985e68607666 
Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 27 Feb 2020 21:30:05 +0100 Subject: [PATCH 0506/1003] Don't run build-vlog-library.sh under bash Travis messes with the .bashrc, so any new shell spawned with TRAVIS_HOME unset will fail the build. Explicitly use dash (if it exists), or sh otherwise. --- .travis.yml | 7 +++---- build-vlog-library.sh | 0 2 files changed, 3 insertions(+), 4 deletions(-) mode change 100644 => 100755 build-vlog-library.sh diff --git a/.travis.yml b/.travis.yml index 45354e497..38e6bc229 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,11 +32,10 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -# before_install: -# - sh ./build-vlog-library.sh before_install: - script: - - ./build-vlog-library.sh + # explicitly avoid bash as travis screws with .bashrc, + # cf. https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 + - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" install: mvn install $OPTIONS -DskipTests=true diff --git a/build-vlog-library.sh b/build-vlog-library.sh old mode 100644 new mode 100755 From 8b2e250d1d0727b9e026eaeb534cc2c99a39ff22 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 27 Feb 2020 22:20:33 +0100 Subject: [PATCH 0507/1003] Get rid of some warnings in .travis.yml --- .travis.yml | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/.travis.yml b/.travis.yml index 38e6bc229..66f6b2c3e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,10 +1,10 @@ language: java -matrix: +os: linux +jobs: include: - os: linux dist: bionic - jdk: - - openjdk11 + jdk: openjdk11 after_success: - mvn clean test jacoco:report coveralls:report @@ -19,18 +19,13 @@ matrix: - g++-6 - libstdc++6 env: CC=gcc-6 CXX=g++-6 - jdk: - - openjdk8 + jdk: openjdk8 - os: osx osx_image: xcode10.2 - - -jobs: allow_failures: - dist: trusty - ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar before_install: # explicitly avoid bash as travis screws with .bashrc, @@ -39,8 +34,6 @@ before_install: install: mvn install $OPTIONS -DskipTests=true -sudo: false - cache: directories: - ./local_builds From 47237ef2829e219ec5acaace9a91a9399e687a65 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 28 Feb 2020 11:52:48 +0100 Subject: [PATCH 0508/1003] rename queryAnswerSize to countQueryAnswers --- .../picocli/VLog4jClientMaterialize.java | 2 +- .../vlog4j/core/reasoner/Reasoner.java | 23 +- .../reasoner/implementation/VLogReasoner.java | 21 +- .../implementation/QueryAnswerSizeTest.java | 216 +++++++++--------- .../examples/CompareWikidataDBpedia.java | 7 +- .../vlog4j/examples/CountingTriangles.java | 9 +- .../vlog4j/examples/DoidExample.java | 2 +- .../InMemoryGraphAnalysisExample.java | 4 +- .../SkolemVsRestrictedChaseTermination.java | 2 +- .../examples/graal/DoidExampleGraal.java | 4 +- 10 files changed, 144 insertions(+), 146 deletions(-) diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java index b5ac9ff08..ac71cd0d2 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -230,7 +230,7 @@ private void 
doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral q } private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { - System.out.println("Number of query answers in " + query + ": " + reasoner.queryAnswerSize(query).getSize()); + System.out.println("Number of query answers in " + query + ": " + reasoner.countQueryAnswers(query).getSize()); } private String queryOputputPath(final PositiveLiteral query) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 445799b44..bae08ac94 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -93,8 +93,7 @@ static Reasoner getInstance() { * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. * - * @param an - * OutputStream for the facts to be written to. + * @param an OutputStream for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException @@ -105,8 +104,7 @@ static Reasoner getInstance() { * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to a desired file. * - * @param a - * String of the file path for the facts to be written to. + * @param a String of the file path for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException @@ -359,13 +357,16 @@ static Reasoner getInstance() { */ QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls); - /* + /** * @param query a {@link PositiveLiteral} representing the query to be answered. * * @return queryAnswerSize(query, true), the number of facts in the extension of - * the query. + * the query. */ - QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query); + + default QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query) { + return countQueryAnswers(query, true); + } // TODO add examples to query javadoc /** @@ -409,14 +410,14 @@ static Reasoner getInstance() { * @param query a {@link PositiveLiteral} representing the query to be * answered. * @param includeNulls if {@code true}, facts with {@link TermType#NAMED_NULL} - * terms will be included in the {@link QueryAnswerSizeImpl}. - * Otherwise, facts with {@link TermType#NAMED_NULL} terms - * will be ignored. + * terms will be included in the + * {@link QueryAnswerSizeImpl}. Otherwise, facts with + * {@link TermType#NAMED_NULL} terms will be ignored. * * @return QueryAnswersSize that contains the Correctness and the number of * facts in the extension of the query. 
*/ - QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query, boolean includeNulls); + QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b71ba53a0..5959a3e92 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -246,8 +246,8 @@ void load(final Predicate predicate, final InMemoryDataSource inMemoryDataSource * Checks if the loaded external data sources do in fact contain data of the * correct arity. * - * @throws IncompatiblePredicateArityException - * to indicate a problem (non-checked exception) + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) */ void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws IncompatiblePredicateArityException { @@ -260,13 +260,11 @@ void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws I * Checks if the loaded external data for a given source does in fact contain * data of the correct arity for the given predidate. * - * @param predicate - * the predicate for which data is loaded - * @param dataSource - * the data source used + * @param predicate the predicate for which data is loaded + * @param dataSource the data source used * - * @throws IncompatiblePredicateArityException - * to indicate a problem (non-checked exception) + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) */ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) throws IncompatiblePredicateArityException { @@ -409,12 +407,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query) { - return queryAnswerSize(query, true); - } - - @Override - public QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query, boolean includeNulls) { + public QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index 818807ed1..7a2f1af57 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -81,9 +81,9 @@ public void noFactsnoRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); + 
assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); } } @@ -93,9 +93,9 @@ public void noFactsUniversalRule() throws IOException { kb.addStatement(QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); } } @@ -105,9 +105,9 @@ public void noFactsExistentialRule() throws IOException { kb.addStatement(RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); } } @@ -117,9 +117,9 @@ public void pFactsNoRules() throws IOException { kb.addStatements(factPc, factPd); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); } } @@ -129,20 +129,20 @@ public void pFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factPc, true).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factPd, true).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factQc, true).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factQd, true).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factPc, false).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factPd, false).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factQc, false).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factQd, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factPc, true).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factPd, true).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factQc, 
true).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factQd, true).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factPc, false).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factPd, false).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factQc, false).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factQd, false).getSize()); } } @@ -153,15 +153,15 @@ public void pFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); } } @@ -171,15 +171,15 @@ public void qFactsUniversalRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); } } @@ -189,15 +189,15 @@ public void qFactsExistentialRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); - 
assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); } } @@ -207,15 +207,15 @@ public void pFactsQFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); } } @@ -225,25 +225,25 @@ public void pFactsQFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rey, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); 
- assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + + assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rey, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getSize()); + + assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rey, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getSize()); } } @@ -253,25 +253,25 @@ public void pFactsQFactsExistentialAndUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(6, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(6, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rey, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(6, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(6, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + + assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rey, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getSize()); + + assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rey, 
false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getSize()); } } @@ -281,17 +281,17 @@ public void pFactsLiteralWithSameVariables() throws IOException { kb.addStatements(factPc, factPd, RxxRxyRyyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Rxx, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxx, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Rxx, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxx, false).getSize()); - assertEquals(6, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(6, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Ryy, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Ryy, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Ryy, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Ryy, false).getSize()); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index cbf837284..5ca7a7404 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -88,9 +88,10 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double resultCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("result(?X)")).getSize(); - final double wdCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inWd(?X)")).getSize(); - final double dbpCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inDbp(?X)")).getSize(); + final double resultCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("result(?X)")) + .getSize(); + final double wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getSize(); + final double dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getSize(); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 79c66c1e6..8f6799447 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -61,9 +61,12 @@ public static void main(final String[] args) throws IOException, ParsingExceptio /* Initialise reasoner and compute inferences */ reasoner.reason(); - final double countries = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("country(?X)")).getSize(); - final double 
shareBorder = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")).getSize(); - final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")).getSize(); + final double countries = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("country(?X)")) + .getSize(); + final double shareBorder = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")) + .getSize(); + final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) + .getSize(); System.out.print("Found " + countries + " countries in Wikidata"); // Due to symmetry, each joint border is found twice, hence we divide by 2: diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index c947d534e..1cf8f1646 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -72,7 +72,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { - double querySize = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral(queryString)).getSize(); + double querySize = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getSize(); System.out.println(" " + queryString + ": " + querySize); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index 23d64cf1c..5726478fa 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -87,9 +87,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double unreachable = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("unreachable(?X)")) + final double unreachable = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("unreachable(?X)")) .getSize(); - final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) + final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) .getSize(); System.out diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 5be95aebf..21b5ef928 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -102,7 +102,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio * restrictions. 
*/ System.out.println("Before the timeout, the Skolem chase had produced " - + reasoner.queryAnswerSize(queryHasPart).getSize() + " results for hasPart(?X, ?Y)."); + + reasoner.countQueryAnswers(queryHasPart).getSize() + " results for hasPart(?X, ?Y)."); /* * 6. We reset the reasoner to discard all inferences, and apply the Restricted diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 98bb8b7ac..4fde8b97e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -131,10 +131,10 @@ public static void main(final String[] args) throws IOException { final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); System.out.println("Humans in Wikidata who died in 2018 due to cancer: " - + reasoner.queryAnswerSize(humansWhoDiedOfCancer).getSize()); + + reasoner.countQueryAnswers(humansWhoDiedOfCancer).getSize()); System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " - + reasoner.queryAnswerSize(humansWhoDiedOfNoncancer).getSize()); + + reasoner.countQueryAnswers(humansWhoDiedOfNoncancer).getSize()); System.out.println("Done."); } From 1f8f76f2ea3a5eb7fa7bb0ddd1fccba9d4246e7c Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 28 Feb 2020 12:08:58 +0100 Subject: [PATCH 0509/1003] fix javadoc; fix implort; add default method --- .../semanticweb/vlog4j/core/reasoner/Reasoner.java | 11 ++++++----- .../core/reasoner/implementation/VLogReasoner.java | 3 ++- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index bae08ac94..51e84ce71 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -13,7 +13,6 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryAnswerSizeImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; /* @@ -360,11 +359,13 @@ static Reasoner getInstance() { /** * @param query a {@link PositiveLiteral} representing the query to be answered. * - * @return queryAnswerSize(query, true), the number of facts in the extension of - * the query. + * @return countQueryAnswers(query, true), i.e., the number of facts in the + * extension of the query, including answers with NamedNull terms that + * have been introduced during reasoning. See also + * {@link Reasoner#countQueryAnswers(PositiveLiteral, boolean)} */ - default QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query) { + default QueryAnswerSize countQueryAnswers(PositiveLiteral query) { return countQueryAnswers(query, true); } @@ -417,7 +418,7 @@ default QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query) { * @return QueryAnswersSize that contains the Correctness and the number of * facts in the extension of the query. 
*/ - QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query, boolean includeNulls); + QueryAnswerSize countQueryAnswers(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 5959a3e92..9ffe011cf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -31,6 +31,7 @@ import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerSize; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -407,7 +408,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query, boolean includeNulls) { + public QueryAnswerSize countQueryAnswers(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); From 2211f8a56b23a3c17a0bc3b1f6ff71d20e289d74 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 17:17:48 +0100 Subject: [PATCH 0510/1003] rename "size" to "count" in class and field names; fix javadoc --- .../picocli/VLog4jClientMaterialize.java | 2 +- ...yAnswerSize.java => QueryAnswerCount.java} | 4 +- .../vlog4j/core/reasoner/Reasoner.java | 40 ++-- ...izeImpl.java => QueryAnswerCountImpl.java} | 16 +- .../reasoner/implementation/VLogReasoner.java | 32 ++- .../implementation/QueryAnswerSizeTest.java | 216 +++++++++--------- .../examples/CompareWikidataDBpedia.java | 6 +- .../vlog4j/examples/CountingTriangles.java | 6 +- .../vlog4j/examples/DoidExample.java | 2 +- .../InMemoryGraphAnalysisExample.java | 4 +- .../SkolemVsRestrictedChaseTermination.java | 2 +- .../examples/graal/DoidExampleGraal.java | 4 +- 12 files changed, 168 insertions(+), 166 deletions(-) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/{QueryAnswerSize.java => QueryAnswerCount.java} (98%) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/{QueryAnswerSizeImpl.java => QueryAnswerCountImpl.java} (78%) diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java index ac71cd0d2..44969e879 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -230,7 +230,7 @@ private void doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral q } private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { - System.out.println("Number of query answers in " + query + ": " + reasoner.countQueryAnswers(query).getSize()); + System.out.println("Number of query answers in " + query + ": " + 
reasoner.countQueryAnswers(query).getCount()); } private String queryOputputPath(final PositiveLiteral query) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java similarity index 98% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java index d4522d875..3438c5e34 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java @@ -51,7 +51,7 @@ * @author Larry González * */ -public interface QueryAnswerSize { +public interface QueryAnswerCount { /** * Returns the correctness of the query result. @@ -73,6 +73,6 @@ public interface QueryAnswerSize { * @return number of query answers, i.e., the number of facts that the query * maps to. */ - long getSize(); + long getCount(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 51e84ce71..55985c1e2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -357,26 +357,32 @@ static Reasoner getInstance() { QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls); /** + * * Evaluates an atomic ({@code query}), and counts the number of query answer + * implicit facts loaded into the reasoner and the number of query answer + * explicit facts materialised by the reasoner. + * * @param query a {@link PositiveLiteral} representing the query to be answered. * - * @return countQueryAnswers(query, true), i.e., the number of facts in the - * extension of the query, including answers with NamedNull terms that - * have been introduced during reasoning. See also - * {@link Reasoner#countQueryAnswers(PositiveLiteral, boolean)} + * @return a {@link QueryAnswerCount} object that contains the query answers + * {@link Correctness} and the number of query answers (i.e. the number + * of facts in the extension of the query), including answers with + * {@link NamedNull} terms that have been introduced during reasoning. + * See also + * {@link Reasoner#countQueryAnswers(PositiveLiteral, boolean)}. */ - default QueryAnswerSize countQueryAnswers(PositiveLiteral query) { - return countQueryAnswers(query, true); + default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { + return this.countQueryAnswers(query, true); } // TODO add examples to query javadoc /** - * Evaluates an atomic ({@code query}), and returns the number of implicit facts - * loaded into the reasoner and the number of explicit facts materialised by the - * reasoner.
          - * An answer to the query is the terms a fact that matches the {@code query}: - * the fact predicate is the same as the {@code query} predicate, the - * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer + * Evaluates an atomic ({@code query}), and counts the number of query answer + * implicit facts loaded into the reasoner and the number of query answer + * explicit facts materialised by the reasoner.
          + * An answer to the query is the term set of a fact that matches the + * {@code query}: the fact predicate is the same as the {@code query} predicate, + * the {@link TermType#CONSTANT} terms of the {@code query} appear in the answer * fact at the same term position, and the {@link TermType#VARIABLE} terms of * the {@code query} are matched by terms in the fact, either named * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The @@ -411,14 +417,14 @@ default QueryAnswerSize countQueryAnswers(PositiveLiteral query) { * @param query a {@link PositiveLiteral} representing the query to be * answered. * @param includeNulls if {@code true}, facts with {@link TermType#NAMED_NULL} - * terms will be included in the - * {@link QueryAnswerSizeImpl}. Otherwise, facts with + * terms will be counted. Otherwise, facts with * {@link TermType#NAMED_NULL} terms will be ignored. * - * @return QueryAnswersSize that contains the Correctness and the number of - * facts in the extension of the query. + * @return a {@link QueryAnswerCount} object that contains the query answers + * Correctness and the number query answers, i.e. the number of facts in + * the extension of the query. */ - QueryAnswerSize countQueryAnswers(PositiveLiteral query, boolean includeNulls); + QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java similarity index 78% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java index 504446f92..68deeb6f0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java @@ -1,7 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswerSize; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; /*- * #%L @@ -23,10 +23,10 @@ * #L% */ -public class QueryAnswerSizeImpl implements QueryAnswerSize { +public class QueryAnswerCountImpl implements QueryAnswerCount { final private Correctness correctness; - final private long size; + final private long count; /** * Constructor of QueryAnswerSize @@ -37,9 +37,9 @@ public class QueryAnswerSizeImpl implements QueryAnswerSize { * extension of the query. 
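The Reasoner hunk above still carries the note "TODO add examples to query javadoc". A minimal usage sketch of the counting API changed by these patches, assuming a knowledge base `kb` that has already been populated with facts and rules, and using a query string in the style of the CountingTriangles example (the predicate name `triangle` is only a placeholder), could look as follows:

```
// Sketch only: assumes the vlog4j-core and vlog4j-parser imports used by the examples
// above (Reasoner, VLogReasoner, QueryAnswerCount, RuleParser), and runs inside a
// method that declares IOException and ParsingException, like the examples' main methods.
try (final Reasoner reasoner = new VLogReasoner(kb)) {
	reasoner.reason(); // materialise all inferences before counting

	// countQueryAnswers(query) replaces the former queryAnswerSize(query); by default it
	// also counts answers that contain named nulls introduced during reasoning.
	final QueryAnswerCount answers = reasoner
			.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)"));
	System.out.println(answers.getCount() + " answers (" + answers.getCorrectness() + ")");

	// Passing false as the second argument ignores answers that contain named nulls.
	final long withoutNulls = reasoner
			.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)"), false)
			.getCount();
	System.out.println(withoutNulls + " answers without named nulls");
}
```

The two-argument variant corresponds to the `includeNulls` flag documented above, whose effect is exercised by the renamed QueryAnswerCountTest in the hunks that follow.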
*/ - QueryAnswerSizeImpl(Correctness correctness, long size) { + QueryAnswerCountImpl(Correctness correctness, long size) { this.correctness = correctness; - this.size = size; + this.count = size; } @Override @@ -48,13 +48,13 @@ public Correctness getCorrectness() { } @Override - public long getSize() { - return this.size; + public long getCount() { + return this.count; } @Override public String toString() { - return this.size + " (" + this.correctness.toString() + ")"; + return this.count + " (" + this.correctness.toString() + ")"; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 9ffe011cf..e8db05863 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -31,7 +31,7 @@ import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswerSize; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -383,11 +383,7 @@ private void runChase() { @Override public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); + validateBeforeQuerying(query); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); @@ -408,12 +404,8 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public QueryAnswerSize countQueryAnswers(PositiveLiteral query, boolean includeNulls) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); + public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls) { + validateBeforeQuerying(query); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); @@ -429,17 +421,13 @@ public QueryAnswerSize countQueryAnswers(PositiveLiteral query, boolean includeN result = 0; } logWarningOnCorrectness(); - return new QueryAnswerSizeImpl(this.correctness, result); + return new QueryAnswerCountImpl(this.correctness, result); } @Override public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); + validateBeforeQuerying(query); Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); 
Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); @@ -458,6 +446,14 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St return this.correctness; } + private void validateBeforeQuerying(final PositiveLiteral query) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } + Validate.notNull(query, "Query atom must not be null!"); + } + @Override public Correctness writeInferences(OutputStream stream) throws IOException { validateNotClosed(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index 7a2f1af57..10f1b450b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -81,9 +81,9 @@ public void noFactsnoRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); } } @@ -93,9 +93,9 @@ public void noFactsUniversalRule() throws IOException { kb.addStatement(QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); } } @@ -105,9 +105,9 @@ public void noFactsExistentialRule() throws IOException { kb.addStatement(RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); } } @@ -117,9 +117,9 @@ public void pFactsNoRules() throws IOException { kb.addStatements(factPc, factPd); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); } } @@ -129,20 +129,20 @@ public void pFactsUniversalRule() throws 
IOException { kb.addStatements(factPc, factPd, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factPc, true).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factPd, true).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factQc, true).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factQd, true).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factPc, false).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factPd, false).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factQc, false).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factQd, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQc, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQd, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPc, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPd, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQc, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQd, false).getCount()); } } @@ -153,15 +153,15 @@ public void pFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); } } @@ -171,15 +171,15 @@ public void qFactsUniversalRule() throws 
IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); } } @@ -189,15 +189,15 @@ public void qFactsExistentialRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); } } @@ -207,15 +207,15 @@ public void pFactsQFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(2, 
reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); } } @@ -225,25 +225,25 @@ public void pFactsQFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); - - assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rey, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getSize()); - - assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rey, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + + assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getCount()); + + assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getCount()); } } @@ -253,25 +253,25 @@ public void pFactsQFactsExistentialAndUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - 
assertEquals(6, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(6, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); - - assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rey, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getSize()); - - assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rey, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(6, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(6, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + + assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getCount()); + + assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getCount()); } } @@ -281,17 +281,17 @@ public void pFactsLiteralWithSameVariables() throws IOException { kb.addStatements(factPc, factPd, RxxRxyRyyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); - assertEquals(4, reasoner.countQueryAnswers(Rxx, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxx, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Rxx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxx, false).getCount()); - assertEquals(6, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(6, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, false).getCount()); - assertEquals(4, reasoner.countQueryAnswers(Ryy, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Ryy, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Ryy, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Ryy, false).getCount()); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 5ca7a7404..0e7d18b32 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -89,9 +89,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio reasoner.reason(); final double resultCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("result(?X)")) - .getSize(); - final double wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getSize(); - final double dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getSize(); + .getCount(); + final double wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getCount(); + final double dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getCount(); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 8f6799447..a97e5438e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -62,11 +62,11 @@ public static void main(final String[] args) throws IOException, ParsingExceptio reasoner.reason(); final double countries = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("country(?X)")) - .getSize(); + .getCount(); final double shareBorder = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")) - .getSize(); + .getCount(); final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) - .getSize(); + .getCount(); System.out.print("Found " + countries + " countries in Wikidata"); // Due to symmetry, each joint border is found twice, hence we divide by 2: diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 1cf8f1646..c88a900b9 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -72,7 +72,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { - double querySize = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getSize(); + double querySize = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount(); System.out.println(" " + queryString + ": " + querySize); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index 5726478fa..3a1702e91 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -88,9 +88,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio reasoner.reason(); final double unreachable = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("unreachable(?X)")) - .getSize(); + .getCount(); final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) - .getSize(); + .getCount(); System.out .println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + unreachable); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 21b5ef928..f9b46ff59 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -102,7 +102,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio * restrictions. */ System.out.println("Before the timeout, the Skolem chase had produced " - + reasoner.countQueryAnswers(queryHasPart).getSize() + " results for hasPart(?X, ?Y)."); + + reasoner.countQueryAnswers(queryHasPart).getCount() + " results for hasPart(?X, ?Y)."); /* * 6. We reset the reasoner to discard all inferences, and apply the Restricted diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 4fde8b97e..ecb80b742 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -131,10 +131,10 @@ public static void main(final String[] args) throws IOException { final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); System.out.println("Humans in Wikidata who died in 2018 due to cancer: " - + reasoner.countQueryAnswers(humansWhoDiedOfCancer).getSize()); + + reasoner.countQueryAnswers(humansWhoDiedOfCancer).getCount()); System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " - + reasoner.countQueryAnswers(humansWhoDiedOfNoncancer).getSize()); + + reasoner.countQueryAnswers(humansWhoDiedOfNoncancer).getCount()); System.out.println("Done."); } From 667a787ac34f0e51a471e4c6325bb73eee1a66db Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 17:27:44 +0100 Subject: [PATCH 0511/1003] renamed test --- .../{QueryAnswerSizeTest.java => QueryAnswerCountTest.java} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/{QueryAnswerSizeTest.java => QueryAnswerCountTest.java} (99%) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java similarity index 99% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java index 10f1b450b..1a7dbd9aa 
100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java @@ -36,7 +36,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -public class QueryAnswerSizeTest { +public class QueryAnswerCountTest { private static final Predicate predP = Expressions.makePredicate("P", 1); private static final Predicate predQ = Expressions.makePredicate("Q", 1); From bfd60aadb09997719c58801a108e5da91c134fa2 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 17:31:26 +0100 Subject: [PATCH 0512/1003] added more test cases for queries without variables --- .../implementation/QueryAnswerCountTest.java | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java index 1a7dbd9aa..4e17d8bdf 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java @@ -65,6 +65,7 @@ public class QueryAnswerCountTest { private static final Fact factPc = Expressions.makeFact(predP, c); private static final Fact factPd = Expressions.makeFact(predP, d); + private static final Fact factPe = Expressions.makeFact(predP, e); private static final Fact factQc = Expressions.makeFact(predQ, c); private static final Fact factQd = Expressions.makeFact(predQ, d); @@ -84,6 +85,9 @@ public void noFactsnoRules() throws IOException { assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); } } @@ -96,6 +100,9 @@ public void noFactsUniversalRule() throws IOException { assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); } } @@ -108,6 +115,9 @@ public void noFactsExistentialRule() throws IOException { assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); } } @@ -120,6 +130,12 @@ public void pFactsNoRules() throws IOException { assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + + assertEquals(0, 
reasoner.countQueryAnswers(factQe, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPe, true).getCount()); + } } From 556754f1afb398220e5cdc3272d59ca9cfe607b5 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 17:33:04 +0100 Subject: [PATCH 0513/1003] renamed querySize variable to answersCount --- .../java/org/semanticweb/vlog4j/examples/DoidExample.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index c88a900b9..e919be73c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -72,8 +72,8 @@ public static void main(final String[] args) throws IOException, ParsingExceptio final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { - double querySize = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount(); - System.out.println(" " + queryString + ": " + querySize); + double answersCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount(); + System.out.println(" " + queryString + ": " + answersCount); } } } From 348ca05a5e4fa46eeabf72532dc1d7da22412cc4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 17:44:26 +0100 Subject: [PATCH 0514/1003] comment lines that build vLog jar in travis.yml --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 66f6b2c3e..27c9754d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,9 +28,9 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar before_install: - # explicitly avoid bash as travis screws with .bashrc, - # cf. https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 - - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" +# # explicitly avoid bash as travis screws with .bashrc, +# # cf. https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 +# - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" install: mvn install $OPTIONS -DskipTests=true From 3ef15b78203ee64e6ed427d71bb76a201fee5a20 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 18:40:02 +0100 Subject: [PATCH 0515/1003] uncomment travis.yml line that builds VLog jar --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 27c9754d3..cb87765d8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -30,7 +30,7 @@ jobs: before_install: # # explicitly avoid bash as travis screws with .bashrc, # # cf. 
https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 -# - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" + - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" install: mvn install $OPTIONS -DskipTests=true From 8625f1574dee0be71dd46c69f7b73981586cce4c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 11:17:26 +0100 Subject: [PATCH 0516/1003] bump vlog version to require snapshot --- vlog4j-core/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index 502f9053c..3c51c676d 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -17,7 +17,7 @@ Core components of VLog4j: reasoner and model - 1.3.2 + 1.3.3-snapshot From 9a522240b3a7529879826452e252d95db64ed544 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 11:20:25 +0100 Subject: [PATCH 0517/1003] Mention counting improvement --- RELEASE-NOTES.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index d3c2ed11d..5a1e07412 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,6 +1,18 @@ VLog4j Release Notes ==================== +VLog4j v0.6.0 +------------- + +Breaking changes: +* In the example package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no + longer exist. It can be replaced by + `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` + +New features: +* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` + + VLog4j v0.5.0 ------------- From 6615608ade53900bd952a4ff567aadf2863f2a14 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 26 Nov 2019 02:17:06 +0100 Subject: [PATCH 0518/1003] Parser: Add support for configurable custom literal syntax --- .../parser/ConfigurableLiteralHandler.java | 44 +++++ .../vlog4j/parser/ParserConfiguration.java | 51 ++++++ .../vlog4j/parser/javacc/JavaCCParser.jj | 60 ++++++- .../parser/javacc/JavaCCParserBase.java | 48 +++++- .../RuleParserConfigurableLiteralTest.java | 154 ++++++++++++++++++ 5 files changed, 343 insertions(+), 14 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java new file mode 100644 index 000000000..a98bfec68 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java @@ -0,0 +1,44 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing a configurable literal expression. + * + * @author Maximilian Marx + */ +@FunctionalInterface +public interface ConfigurableLiteralHandler { + /** + * Parse a Data Source Declaration. + * + * @param syntacticForm syntactic form of the literal expression. + * @param subParserFactory a factory for obtaining a SubParser, sharing the + * parser's state, but bound to new input. + * + * @throws ParsingException when the given syntactic form is invalid. + * @return an appropriate @{link Constant} instance. + */ + public Constant parseLiteral(String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException; +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 770c7fd16..3e89c30ea 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -29,6 +29,7 @@ import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -47,6 +48,11 @@ public class ParserConfiguration { */ private final HashMap datatypes = new HashMap<>(); + /** + * The registered configurable literals. + */ + private HashMap literals = new HashMap<>(); + /** * Register a new (type of) Data Source. * @@ -136,6 +142,41 @@ private Constant parseDatatypeConstant(final String lexicalForm, final String da return Expressions.makeDatatypeConstant(lexicalForm, type); } + /** + * Check if a handler for this + * {@link org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter} + * is registered + * + * @param delimiter delimiter to check. + * @return true if a handler for the given delimiter is registered. + */ + public boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter delimiter) { + return literals.containsKey(delimiter); + } + + /** + * Parse a configurable literal. + * + * @param delimiter delimiter given for the syntactic form. + * @param syntacticForm syntantic form of the literal to parse. + * @param subParserFactory a {@link SubParserFactory} instance that creates + * parser with the same context as the current parser. + * + * @throws ParsingException when no handler for the literal is registered, or + * the given syntactic form is invalid. + * @return an appropriate {@link Constant} instance. + */ + public Constant parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, + final SubParserFactory subParserFactory) throws ParsingException { + if (!isConfigurableLiteralRegistered(delimiter)) { + throw new ParsingException( + "No handler for configurable literal delimiter \"" + delimiter + "\" registered."); + } + + ConfigurableLiteralHandler handler = literals.get(delimiter); + return handler.parseLiteral(syntacticForm, subParserFactory); + } + /** * Register a new data type. 
* @@ -155,4 +196,14 @@ public ParserConfiguration registerDatatype(final String name, final DatatypeCon this.datatypes.put(name, handler); return this; } + + public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimiter, + ConfigurableLiteralHandler handler) throws IllegalArgumentException { + if (literals.containsKey(delimiter)) { + throw new IllegalArgumentException("Literal delimiter \"" + delimiter + "\" is already registered."); + } + + this.literals.put(delimiter, handler); + return this; + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 2a127ba9b..c84af1322 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -285,6 +285,11 @@ Term term(FormulaContext context) throws PrefixDeclarationException: return Expressions.makeExistentialVariable(s); } | t = < VARORPREDNAME > { return createConstant(t.image); } +| try { + c = ConfigurableLiteral () { return c; } + } catch (ParsingException e) { + throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); + } } /** [16] */ @@ -309,6 +314,42 @@ Constant RDFLiteral() throws PrefixDeclarationException: { return createConstant(lex, lang, dt); } } +Constant ConfigurableLiteral() throws ParsingException: +{ + Token t; +} +{ + ( LOOKAHEAD( < PIPE_DELIMINATED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) + t = < PIPE_DELIMINATED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, + stripDelimiters(t.image, 1), + getSubParserFactory()); + } + | LOOKAHEAD( < HASH_DELIMINATED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) + t = < HASH_DELIMINATED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, + stripDelimiters(t.image, 1), + getSubParserFactory()); + } + // | LOOKAHEAD( < BRACKET_DELIMINATED_LITERAL >, + // { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) + // t = < BRACKET_DELIMINATED_LITERAL > { + // return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, + // stripDelimiters(t.image, 1), + // getSubParserFactory()); + // } + | LOOKAHEAD( < BRACE_DELIMINATED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) + t = < BRACE_DELIMINATED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, + stripDelimiters(t.image, 1), + getSubParserFactory()); + } + ) +} + String Langtag() : { Token t; @@ -330,11 +371,10 @@ String String(): String lex; } { - ( - t = < STRING_LITERAL1 > { lex = stripQuotes(t.image); } - | t = < STRING_LITERAL2 > { lex = stripQuotes(t.image); } - | t = < STRING_LITERAL_LONG1 > { lex = stripQuotes3(t.image); } - | t = < STRING_LITERAL_LONG2 > { lex = stripQuotes3(t.image); } + ( t = < STRING_LITERAL1 > { lex = stripDelimiters(t.image, 1); } + | t = < STRING_LITERAL2 > { lex = stripDelimiters(t.image, 1); } + | t = < STRING_LITERAL_LONG1 > { lex = stripDelimiters(t.image, 3); } + | t = < STRING_LITERAL_LONG2 > { lex = stripDelimiters(t.image, 3); } ) { lex = unescapeStr(lex, t.beginLine, t.beginColumn); @@ -394,7 +434,7 @@ 
String IRIREF() : t = < IRI > { // we remove '<' and '>' - return t.image.substring(1,t.image.length()-1); + return stripDelimiters(t.image, 1); } } @@ -583,3 +623,11 @@ TOKEN : < PN_CHARS > )? > } + +TOKEN : +{ + < PIPE_DELIMINATED_LITERAL : ( "|" (~[ "|" ])* "|" ) > : DEFAULT +| < HASH_DELIMINATED_LITERAL : ( "#" (~[ "#" ])* "#" ) > : DEFAULT +// | < BRACKET_DELIMINATED_LITERAL : ( "[" (~[ "]" ])* "]" ) > : DEFAULT +| < BRACE_DELIMINATED_LITERAL : ( "{" (~[ "}" ])* "}" ) > : DEFAULT +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 63173e270..f55801423 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -47,6 +47,7 @@ * * @author Markus Kroetzsch * @author Larry Gonzalez + * @author Maximilian Marx * @author Jena developers, Apache Software Foundation (ASF) * */ @@ -88,6 +89,35 @@ public enum FormulaContext { BODY } + /** + * Defines delimiters for configurable literals. + * + * Since the parser is generated from a fixed grammar, we need to provide + * productions for these literals, even if they are not part of the syntax. With + * the {@link DefaultParserConfiguration}, any occurence of these literals will + * result in a {@link ParseException}. + * + * @author Maximilian Marx + */ + public enum ConfigurableLiteralDelimiter { + /** + * Literals of the form {@code |…|} + */ + PIPE, + /** + * Literals of the form {@code #…#} + */ + HASH, + /** + * Literals of the form {@code […]} + */ + BRACKET, + /** + * Literals of the form {@code {…}} + */ + BRACE, + } + public JavaCCParserBase() { this.knowledgeBase = new KnowledgeBase(); this.prefixDeclarations = new LocalPrefixDeclarations(); @@ -211,14 +241,16 @@ static String unescape(String s, char escape, boolean pointCodeOnly, int line, i return sb.toString(); } - /** Remove first and last characters (e.g. ' or "") from a string */ - static String stripQuotes(String s) { - return s.substring(1, s.length() - 1); - } - - /** Remove first 3 and last 3 characters (e.g. ''' or """) from a string */ - static String stripQuotes3(String s) { - return s.substring(3, s.length() - 3); + /** + * Remove the first and last {@code n} characters from string {@code s} + * + * @param s string to strip delimiters from + * @param n number of characters to strip from both ends + * + * @return the stripped string. + */ + static String stripDelimiters(String s, int n) { + return s.substring(n, s.length() - n); } /** remove the first n charcacters from the string */ diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java new file mode 100644 index 000000000..ff6de9d2e --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -0,0 +1,154 @@ +package org.semanticweb.vlog4j.syntax.parser; + +/*- + * #%L + * VLog4j Parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.junit.Ignore; +import org.mockito.ArgumentMatchers; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.parser.ConfigurableLiteralHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +public class RuleParserConfigurableLiteralTest { + public static final Constant pipeConstant = Expressions.makeAbstractConstant("testPipe"); + public static final Constant hashConstant = Expressions.makeAbstractConstant("testHash"); + public static final Constant bracketConstant = Expressions.makeAbstractConstant("testBracket"); + public static final Constant braceConstant = Expressions.makeAbstractConstant("testBrace"); + + public static final ConfigurableLiteralHandler pipeHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.PIPE, pipeConstant); + public static final ConfigurableLiteralHandler hashHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.HASH, hashConstant); + public static final ConfigurableLiteralHandler bracketHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.BRACKET, bracketConstant); + public static final ConfigurableLiteralHandler braceHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.BRACE, braceConstant); + + @Test(expected = ParsingException.class) + public void testNoDefaultPipeLiteral() throws ParsingException { + RuleParser.parseLiteral("p(|test|)"); + } + + @Test + public void testCustomLiteralRegistration() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); + assertTrue("Configurable Literal Handler has been registered", + parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE)); + } + + @Test(expected = IllegalArgumentException.class) + public void testNoDuplicateCustomLiteralRegistration() throws ParsingException, IllegalArgumentException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) + .registerLiteral(ConfigurableLiteralDelimiter.PIPE, hashHandler); + } + + @Test + public void testCustomPipeLiteral() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); + Literal result = RuleParser.parseLiteral("p(|test|)", parserConfiguration); + 
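        // Editor's note (illustration only, not part of the original patch): pipeHandler above is a
        // Mockito mock that always returns pipeConstant. A handler can also be supplied directly as a
        // lambda implementing ConfigurableLiteralHandler (as testNonTrivialCustomPipeLiteral further
        // down in this file does), for example:
        //   parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE,
        //       (syntacticForm, subParserFactory) -> Expressions.makeAbstractConstant(syntacticForm));
        // Here "p(|test|)" would then yield the abstract constant "test" instead of pipeConstant.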
assertEquals(pipeConstant, result.getConstants().toArray()[0]); + } + + @Test + public void testCustomHashLiteral() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler); + Literal result = RuleParser.parseLiteral("p(#test#)", parserConfiguration); + assertEquals(hashConstant, result.getConstants().toArray()[0]); + } + + @Test + @Ignore + public void testCustomBracketLiteral() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); + Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); + assertEquals(bracketConstant, result.getConstants().toArray()[0]); + } + + @Test + public void testCustomBraceLiteral() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); + Literal result = RuleParser.parseLiteral("p({test})", parserConfiguration); + assertEquals(braceConstant, result.getConstants().toArray()[0]); + } + + @Test + @Ignore + public void testMixedCustomLiterals() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) + .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); + Literal result = RuleParser.parseLiteral("p(||, #test#, [[], {})", parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(pipeConstant, hashConstant, bracketConstant, braceConstant)); + assertEquals(expected, constants); + } + + @Test + public void testNonTrivialCustomPipeLiteral() throws ParsingException { + String label = "this is a test, do not worry."; + String input = "p(|" + label + "|)"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); + } + + static Constant makeReversedConstant(String name) { + StringBuilder builder = new StringBuilder(name); + return Expressions.makeAbstractConstant(builder.reverse().toString()); + } + + static ConfigurableLiteralHandler getMockLiteralHandler(ConfigurableLiteralDelimiter delimiter, Constant constant) { + ConfigurableLiteralHandler handler = mock(ConfigurableLiteralHandler.class); + try { + doReturn(constant).when(handler).parseLiteral(ArgumentMatchers.anyString(), + ArgumentMatchers.any()); + } catch (ParsingException e) { + // ignore it, since the mock will not throw + } + return handler; + } + +} From 2bc503b755eb790cf58f2a028ec845d3eacc0927 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 23 Jan 2020 17:52:25 +0100 Subject: [PATCH 0519/1003] Parser: Fix typo --- .../src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 431a01d0f..6b7a5eec2 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -182,7 +182,7 @@ public static Literal parseLiteral(final String input) throws ParsingException { public static PositiveLiteral parsePositiveLiteral(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - return parseSyntaxFragment(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positivel literal", + return parseSyntaxFragment(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positive literal", parserConfiguration); } From 1617836bb89bf290666ae579a665672e01831eb4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 24 Jan 2020 18:23:09 +0100 Subject: [PATCH 0520/1003] Parser: Make grammar stateful --- ...eryResultDataSourceDeclarationHandler.java | 12 +- .../vlog4j/parser/javacc/JavaCCParser.jj | 503 +++++++++--------- 2 files changed, 246 insertions(+), 269 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 3524fcb0f..bfed050f4 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -47,11 +47,13 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto String endpoint = arguments.get(0); JavaCCParser parser = subParserFactory.makeSubParser(endpoint); String parsedEndpoint; - try { - parsedEndpoint = parser.IRI(false); - } catch (ParseException | PrefixDeclarationException e) { - throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); - } + // try { + /// @TODO: actually make sure that this is a valid IRI + parsedEndpoint = endpoint.substring(1, endpoint.length() - 1); + //parsedEndpoint = parser.quotedIri(); + // } catch (ParseException | PrefixDeclarationException e) { + // throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); + // } URL endpointUrl; try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index c84af1322..c90c5e5da 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -20,7 +20,9 @@ import java.net.URL; import java.net.MalformedURLException; import java.util.List; +import java.util.Deque; import java.util.ArrayList; +import java.util.ArrayDeque; import java.util.LinkedList; import org.semanticweb.vlog4j.parser.ParsingException; @@ -57,43 +59,61 @@ public class JavaCCParser extends JavaCCParserBase PARSER_END(JavaCCParser) +TOKEN_MGR_DECLS : { + // use initializer block to work around auto-generated constructors. 
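    // Editor's note (annotation, not part of the original patch): the stack declared here backs the
    // pushState()/popState() helpers used throughout the stateful lexer introduced by this patch.
    // Roughly, for an input such as p('a'): LPAREN pushes the current state and switches to TERM,
    // the opening quote pushes TERM and switches to SINGLE_QUOTED, the closing quote pops back to
    // TERM, and RPAREN pops back to whatever state was active before the parenthesis.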
+ { + states = new ArrayDeque(); + } -void parse() throws PrefixDeclarationException: -{ + Deque states; + + void pushState() { + states.push(curLexState); + } + + void popState() { + SwitchTo(states.pop()); + } } -{ - ( base() )? - ( prefix() )* - ( source() )* - ( statement() )* - < EOF > + + +void parse() throws PrefixDeclarationException : { +} { + ( base() )? + ( prefix() )* + ( source() )* + ( statement() )* + < EOF > } -void base() throws PrefixDeclarationException: -{ - String iriString; +void base() throws PrefixDeclarationException : { + Token iri; +} { + < BASE > iri = < IRI_ABSOLUTE > < DOT > { + prefixDeclarations.setBase(iri.image); + } } -{ - < BASE > iriString = IRIREF() < DOT > - { - prefixDeclarations.setBase(iriString); - } + +void prefix() throws PrefixDeclarationException : { + Token pn; + String iri; +} { + < PREFIX > pn = < PNAME_NS > iri = absoluteIri() < DOT > { + prefixDeclarations.setPrefix(pn.image, iri); + } } -void prefix() throws PrefixDeclarationException: -{ - Token t; - String iriString; +String absoluteIri() throws PrefixDeclarationException : { + Token iri; +} { + iri = < IRI_ABSOLUTE > { return prefixDeclarations.absolutize(iri.image); } + | iri = < PNAME_LN > { return prefixDeclarations.resolvePrefixedName(iri.image); } } -{ - ( - LOOKAHEAD(< COLON >) < PREFIX > t = < COLON > iriString = IRIREF() < DOT > - | < PREFIX > t = < PNAME_NS > iriString = IRIREF() < DOT > - ) - { - //note that prefix includes the colon (:) - prefixDeclarations.setPrefix(t.image, iriString); - } + +String quotedIri() throws PrefixDeclarationException : { + String iri; +} { + iri = absoluteIri() { return "<" + iri + ">"; } } void source() throws PrefixDeclarationException: @@ -103,7 +123,7 @@ void source() throws PrefixDeclarationException: Token arity; } { - < SOURCE > predicateName = predicateName() < LBRACK > arity = < INTEGER > < RBRACK > < COLON > dataSource = dataSource() < DOT > + < SOURCE > predicateName = predicateName() arity = < ARITY > < COLON > dataSource = dataSource() < DOT > { int nArity; nArity = Integer.parseInt(arity.image); @@ -119,7 +139,7 @@ DataSource dataSource() throws PrefixDeclarationException: List< String > arguments; } { - (sourceName = < DIRECTIVENAME > | sourceName = < VARORPREDNAME >) < LPAREN > arguments = Arguments() < RPAREN > + (sourceName = < ARGUMENT_NAME > | sourceName = < VARORPREDNAME >) < LPAREN > arguments = Arguments() < RPAREN > { try { return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); @@ -246,14 +266,12 @@ List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationExcept { return list; } } -String predicateName() throws PrefixDeclarationException: -{ - String s; +String predicateName() throws PrefixDeclarationException : { Token t; -} -{ - s = IRI(false) { return s; } -| t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } + String s; +} { + s = absoluteIri() { return s; } + | t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } } Term term(FormulaContext context) throws PrefixDeclarationException: @@ -285,11 +303,11 @@ Term term(FormulaContext context) throws PrefixDeclarationException: return Expressions.makeExistentialVariable(s); } | t = < VARORPREDNAME > { return createConstant(t.image); } -| try { - c = ConfigurableLiteral () { return c; } - } catch (ParsingException e) { - throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); - } +// | try { +// c = 
ConfigurableLiteral () { return c; } +// } catch (ParsingException e) { +// throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); +// } } /** [16] */ @@ -314,41 +332,41 @@ Constant RDFLiteral() throws PrefixDeclarationException: { return createConstant(lex, lang, dt); } } -Constant ConfigurableLiteral() throws ParsingException: -{ - Token t; -} -{ - ( LOOKAHEAD( < PIPE_DELIMINATED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) - t = < PIPE_DELIMINATED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, - stripDelimiters(t.image, 1), - getSubParserFactory()); - } - | LOOKAHEAD( < HASH_DELIMINATED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) - t = < HASH_DELIMINATED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, - stripDelimiters(t.image, 1), - getSubParserFactory()); - } - // | LOOKAHEAD( < BRACKET_DELIMINATED_LITERAL >, - // { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) - // t = < BRACKET_DELIMINATED_LITERAL > { - // return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, - // stripDelimiters(t.image, 1), - // getSubParserFactory()); - // } - | LOOKAHEAD( < BRACE_DELIMINATED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) - t = < BRACE_DELIMINATED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, - stripDelimiters(t.image, 1), - getSubParserFactory()); - } - ) -} +// Constant ConfigurableLiteral() throws ParsingException: +// { +// Token t; +// } +// { +// ( LOOKAHEAD( < PIPE_DELIMINATED_LITERAL >, +// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) +// t = < PIPE_DELIMINATED_LITERAL > { +// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, +// stripDelimiters(t.image, 1), +// getSubParserFactory()); +// } +// | LOOKAHEAD( < HASH_DELIMINATED_LITERAL >, +// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) +// t = < HASH_DELIMINATED_LITERAL > { +// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, +// stripDelimiters(t.image, 1), +// getSubParserFactory()); +// } +// // | LOOKAHEAD( < BRACKET_DELIMINATED_LITERAL >, +// // { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) +// // t = < BRACKET_DELIMINATED_LITERAL > { +// // return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, +// // stripDelimiters(t.image, 1), +// // getSubParserFactory()); +// // } +// | LOOKAHEAD( < BRACE_DELIMINATED_LITERAL >, +// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) +// t = < BRACE_DELIMINATED_LITERAL > { +// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, +// stripDelimiters(t.image, 1), +// getSubParserFactory()); +// } +// ) +// } String Langtag() : { @@ -371,10 +389,10 @@ String String(): String lex; } { - ( t = < STRING_LITERAL1 > { lex = stripDelimiters(t.image, 1); } - | t = < STRING_LITERAL2 > { lex = stripDelimiters(t.image, 1); } - | t = < STRING_LITERAL_LONG1 > { lex = stripDelimiters(t.image, 3); } - | t = < STRING_LITERAL_LONG2 > 
{ lex = stripDelimiters(t.image, 3); } + ( t = < SINGLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 1); } + | t = < DOUBLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 1); } + | t = < TRIPLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 3); } + | t = < SIXFOLD_QUOTED_STRING > { lex = stripDelimiters(t.image, 3); } ) { lex = unescapeStr(lex, t.beginLine, t.beginColumn); @@ -388,7 +406,7 @@ LinkedList< String > Arguments() throws PrefixDeclarationException: LinkedList< String > rest = new LinkedList< String >(); } { - (str = String() | str = IRI(true)) [< COMMA > rest = Arguments()] + (str = String() | str = quotedIri()) [< COMMA > rest = Arguments()] { rest.addFirst(str); return rest; @@ -401,7 +419,7 @@ String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: } { ( - iri = IRIREF() + iri = IRIREF() | iri = PrefixedName() ) { @@ -439,195 +457,152 @@ String IRIREF() : } // ------------------------------------------ + // Whitespace -SKIP : -{ - " " -| "\t" -| "\n" -| "\r" -| "\f" +< * > SKIP : { + < WHITESPACE : [ " ", "\t", "\n", "\r", "\f" ] > } -//Comments -SKIP :{< "%" (~["\n"])* "\n" >} +// Comments +< * > SKIP : { + < COMMENT : "%" ( ~[ "\n" ] )* "\n" > +} // ------------------------------------------ -TOKEN : -{ - < PREFIX : "@prefix" > -| < BASE : "@base" > -| < SOURCE : "@source" > +MORE : { + "@": DIRECTIVE +} + +< DEFAULT, TERM, DIRECTIVE_ARGUMENTS > MORE : { + "<" { pushState(); } : ABSOLUTE_IRI +} + +< DEFAULT, BODY, TERM, DIRECTIVE_ARGUMENTS > TOKEN : { + < VARORPREDNAME : < A2Z> (< A2ZN >)* > + | < #A2Z : [ "a"-"z", "A"-"Z" ] > + | < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > + | < PNAME_LN : (< PN_PREFIX >)? ":" < PN_LOCAL > > + | < PNAME_NS : < PN_PREFIX > ":" > + | < #PN_CHARS_BASE : [ "a"-"z", "A"-"Z", "\u00c0"-"\u00d6", + "\u00d8"-"\u00f6", "\u00f8"-"\u02ff", + "\u0370"-"\u037d", "\u037f"-"\u1fff", + "\u200c"-"\u200d", "\u2070"-"\u218f", + "\u2c00"-"\u2fef", "\u3001"-"\ud7ff", + "\uf900"-"\ufffd" ] > + | < #PN_CHARS_U : < PN_CHARS_BASE > | "_" > + | < #PN_CHARS : ( < PN_CHARS_U > | [ "-", "0"-"9", "\u00b7", + "\u0300"-"\u036f", + "\u203f"-"\u2040" ] ) > + | < #PN_PREFIX : < PN_CHARS_BASE > + ( ( < PN_CHARS > | "." )* < PN_CHARS > )? > + | < #PN_LOCAL : ( < PN_CHARS_U > | [ ":", "0"-"9" ] ) + ( ( < PN_CHARS > | [ ".", ":" ] )* < PN_CHARS > )? > + | < COMMA : "," > + | < LPAREN : "(" > { + pushState(); + + if (curLexState == DEFAULT || curLexState == BODY) { + SwitchTo(TERM); + } + } + | < RPAREN : ")" > { popState(); } } -TOKEN: -{ - < INTEGER : ([ "-", "+" ])? < DIGITS > > -| < DECIMAL : - ([ "-", "+" ])? - ( - (< DIGITS >)+ "." (< DIGITS >)* - | "." (< DIGITS >)+ - ) - > -| < DOUBLE : - ([ "+", "-" ])? - ( - ([ "0"-"9" ])+ "." ([ "0"-"9" ])* < EXPONENT > - | "." ([ "0"-"9" ])+ (< EXPONENT >) - | ([ "0"-"9" ])+ < EXPONENT > - ) - > -| < #DIGITS : ([ "0"-"9" ])+ > -| < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? ([ "0"-"9" ])+ > +< TERM, DIRECTIVE_ARGUMENTS > TOKEN : { + < INTEGER : (< SIGN >)? < DIGITS > > + | < DECIMAL : (< SIGN >)? ( < DIGITS > "." (< DIGIT >)* + | "." < DIGITS > ) > + | < DOUBLE : (< SIGN >)? ( < DIGITS > "." (< DIGIT >)* < EXPONENT > + | "." (< DIGITS >) (< EXPONENT >) + | < DIGITS > < EXPONENT > ) > + | < #SIGN : [ "+", "-" ] > + | < #DIGIT : [ "0"-"9" ] > + | < #DIGITS : (< DIGIT >)+ > + | < #EXPONENT : [ "e", "E" ] (< SIGN >)? 
< DIGITS > > + | < COLON : ":" > } -TOKEN: -{ - < STRING_LITERAL1 : - // Single quoted string - "'" - ( - (~[ "'", "\\", "\n", "\r" ]) - | < ECHAR > - )* - "'" > -| < STRING_LITERAL2 : - // Double quoted string - "\"" - ( - (~[ "\"", "\\", "\n", "\r" ]) - | < ECHAR > - )* - "\"" > -| < STRING_LITERAL_LONG1 : - "'''" - ( - ~[ "'", "\\" ] - | < ECHAR > - | ("'" ~[ "'" ]) - | ("''" ~[ "'" ]) - )* - "'''" > -| < STRING_LITERAL_LONG2 : - "\"\"\"" - ( - ~[ "\"", "\\" ] - | < ECHAR > - | ("\"" ~[ "\"" ]) - | ("\"\"" ~[ "\"" ]) - )* - "\"\"\"" > -| < #ECHAR : - "\\" - ( - "t" - | "b" - | "n" - | "r" - | "f" - | "\\" - | "\"" - | "'" - ) > -} - -TOKEN : -{ - // Includes # for relative URIs - < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > -| < PNAME_LN : (< PN_PREFIX >)? ":" < PN_LOCAL > > -| < PNAME_NS : < PN_PREFIX > ":" > -| < UNIVAR : < QMARK > < VARORPREDNAME > > -| < EXIVAR : < EMARK > < VARORPREDNAME > > -| < LANGTAG : - < AT > (< A2Z >)+ - ( - "-" (< A2ZN >)+ - )* > -| < VARORPREDNAME : < A2Z> (< A2ZN >)* > -| < #A2Z : [ "a"-"z", "A"-"Z" ] > -| < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > -| < DIRECTIVENAME : < A2Z > (< A2ZNX >)* > -| < #A2ZNX : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > -} - -TOKEN : -{ - < LPAREN : "(" > -| < RPAREN : ")" > -| < LBRACK : "[" > -| < RBRACK : "]" > -| < COMMA : "," > -| < DOT : "." > -| < ARROW : ":-" > -| < QMARK : "?" > -| < EMARK : "!" > -| < TILDE : "~" > -| < COLON : ":" > -| < DATATYPE : "^^" > -| < AT : "@" > -} - -TOKEN : -{ - < #PN_CHARS_BASE : - [ "A"-"Z" ] - | [ "a"-"z" ] - | [ "\u00c0"-"\u00d6" ] - | [ "\u00d8"-"\u00f6" ] - | [ "\u00f8"-"\u02ff" ] - | [ "\u0370"-"\u037d" ] - | [ "\u037f"-"\u1fff" ] - | [ "\u200c"-"\u200d" ] - | [ "\u2070"-"\u218f" ] - | [ "\u2c00"-"\u2fef" ] - | [ "\u3001"-"\ud7ff" ] - | [ "\uf900"-"\ufffd" ] - > - // | [ ""#x10000-#xEFFFF] -| - < #PN_CHARS_U : - < PN_CHARS_BASE > - | "_" > -| < #PN_CHARS : - ( - < PN_CHARS_U > - | "-" - | [ "0"-"9" ] - | "\u00b7" - | [ "\u0300"-"\u036f" ] - | [ "\u203f"-"\u2040" ] - ) > -| < #PN_PREFIX : - < PN_CHARS_BASE > - ( - ( - < PN_CHARS > - | "." - )* - < PN_CHARS > - )? > -| < #PN_LOCAL : - ( - < PN_CHARS_U > - | ":" - | [ "0"-"9" ] - ) - ( - ( - < PN_CHARS > - | "." - | ":" - )* - < PN_CHARS > - )? > +TOKEN : { + < ARROW : ":-" > : BODY } -TOKEN : -{ - < PIPE_DELIMINATED_LITERAL : ( "|" (~[ "|" ])* "|" ) > : DEFAULT -| < HASH_DELIMINATED_LITERAL : ( "#" (~[ "#" ])* "#" ) > : DEFAULT -// | < BRACKET_DELIMINATED_LITERAL : ( "[" (~[ "]" ])* "]" ) > : DEFAULT -| < BRACE_DELIMINATED_LITERAL : ( "{" (~[ "}" ])* "}" ) > : DEFAULT +< DEFAULT, BODY > TOKEN : { + < TILDE : "~" > } + +< ABSOLUTE_IRI > TOKEN : { + < IRI_ABSOLUTE : (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > { + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + popState(); + } +} + +< DIRECTIVE > TOKEN : { + < BASE : "base" > : DIRECTIVE_ARGUMENTS + | < PREFIX : "prefix" > : DIRECTIVE_ARGUMENTS + | < SOURCE : "source" > : DIRECTIVE_ARGUMENTS + | < CUSTOM : < DIRECTIVENAME > > : DIRECTIVE_ARGUMENTS + | < DIRECTIVENAME : [ "a"-"z", "A"-"Z" ] ([ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ])* > +} + +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > TOKEN : { + < DOT : "." > : DEFAULT +} + +< DIRECTIVE_ARGUMENTS > TOKEN : { + < ARITY : "[" < INTEGER > "]" > { + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } + | < ARGUMENT_NAME : < DIRECTIVENAME > > +} + +< TERM > TOKEN : { + < UNIVAR : "?" 
< VARORPREDNAME > > + | < EXIVAR : "!" < VARORPREDNAME > > + | < LANGTAG : "@" ( < A2Z > )+ ( "-" ( < A2ZN > )+ )? > + | < DATATYPE : "^^" > + | < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > +} + +< TERM, DIRECTIVE_ARGUMENTS > MORE : { + < "'" > { pushState(); } : SINGLE_QUOTED + | < "\"" > { pushState(); } : DOUBLE_QUOTED + | < "'''" > { pushState(); }: TRIPLE_QUOTED + | < "\"\"\"" > { pushState(); } : SIXFOLD_QUOTED +} + +< SINGLE_QUOTED > TOKEN : { + < SINGLE_QUOTED_STRING : ( ~[ "'", "\\", "\n", "\r" ] + | < ESCAPE_SEQUENCE > )* "'" > { popState(); } +} + +< DOUBLE_QUOTED > TOKEN : { + < DOUBLE_QUOTED_STRING : ( ~[ "\"", "\\", "\n", "\r" ] + | < ESCAPE_SEQUENCE > )* "\"" > { popState(); } +} + +< TRIPLE_QUOTED > TOKEN : { + < TRIPLE_QUOTED_STRING : ( ~[ "'", "\\" ] + | < ESCAPE_SEQUENCE > + | ( "'" ~[ "'" ] ) + | ( "''" ~[ "'" ] ) )* "'''" > { popState(); } +} + +< SIXFOLD_QUOTED > TOKEN : { + < SIXFOLD_QUOTED_STRING : ( ~[ "\"", "\\" ] + | < ESCAPE_SEQUENCE > + | ( "\"" ~[ "\"" ] ) + | ( "\"\"" ~[ "\"" ] ) )* "\"\"\"" > { popState(); } +} + +< SINGLE_QUOTED, DOUBLE_QUOTED, TRIPLE_QUOTED, SIXFOLD_QUOTED > MORE : { + < ESCAPE_SEQUENCE : "\\" [ "t", "b", "n", "r", "f", "\\", "\"", "'" ] > +} + +// TOKEN : +// { +// < PIPE_DELIMINATED_LITERAL : ( "|" (~[ "|" ])* "|" ) > : DEFAULT +// | < HASH_DELIMINATED_LITERAL : ( "#" (~[ "#" ])* "#" ) > : DEFAULT +// // | < BRACKET_DELIMINATED_LITERAL : ( "[" (~[ "]" ])* "]" ) > : DEFAULT +// | < BRACE_DELIMINATED_LITERAL : ( "{" (~[ "}" ])* "}" ) > : DEFAULT +// } From e3fb878830e2129772471f13cfe5e096e66609c9 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 29 Jan 2020 17:08:37 +0100 Subject: [PATCH 0521/1003] Parser: Simplify constant handling, support configurable literals --- .../vlog4j/parser/ParserConfiguration.java | 19 +- ...eryResultDataSourceDeclarationHandler.java | 12 +- .../vlog4j/parser/javacc/JavaCCParser.jj | 423 ++++++++---------- .../parser/javacc/JavaCCParserBase.java | 17 +- .../parser/ParserConfigurationTest.java | 7 - .../RuleParserConfigurableLiteralTest.java | 55 ++- 6 files changed, 251 insertions(+), 282 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 3e89c30ea..096a1a06c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -106,32 +106,17 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final Strin } /** - * Parse a constant with optional data type and language tag. + * Parse a constant with optional data type. * * @param lexicalForm the (unescaped) lexical form of the constant. * @param languageTag the language tag, or null if not present. * @param the datatype, or null if not present. - * @pre At most one of {@code languageTag} and {@code datatype} may be non-null. * * @throws ParsingException when the lexical form is invalid for the * given data type. - * @throws IllegalArgumentException when both {@code languageTag} and - * {@code datatype} are non-null. * @return the {@link Constant} corresponding to the given arguments. 
*/ - public Constant parseConstant(final String lexicalForm, final String languageTag, final String datatype) - throws ParsingException, IllegalArgumentException { - Validate.isTrue((languageTag == null) || (datatype == null), - "A constant with a language tag may not explicitly specify a data type."); - - if (languageTag != null) { - return Expressions.makeLanguageStringConstant(lexicalForm, languageTag); - } else { - return this.parseDatatypeConstant(lexicalForm, datatype); - } - } - - private Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { + public Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { final String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); final DatatypeConstantHandler handler = this.datatypes.get(type); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index bfed050f4..b49683115 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -47,13 +47,11 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto String endpoint = arguments.get(0); JavaCCParser parser = subParserFactory.makeSubParser(endpoint); String parsedEndpoint; - // try { - /// @TODO: actually make sure that this is a valid IRI - parsedEndpoint = endpoint.substring(1, endpoint.length() - 1); - //parsedEndpoint = parser.quotedIri(); - // } catch (ParseException | PrefixDeclarationException e) { - // throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); - // } + try { + parsedEndpoint = parser.absoluteIri(); + } catch (ParseException | PrefixDeclarationException e) { + throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); + } URL endpointUrl; try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index c90c5e5da..810cd61d1 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -116,31 +116,27 @@ String quotedIri() throws PrefixDeclarationException : { iri = absoluteIri() { return "<" + iri + ">"; } } -void source() throws PrefixDeclarationException: -{ +void source() throws PrefixDeclarationException : { String predicateName; DataSource dataSource; Token arity; -} -{ +} { < SOURCE > predicateName = predicateName() arity = < ARITY > < COLON > dataSource = dataSource() < DOT > { int nArity; - nArity = Integer.parseInt(arity.image); // Do not catch NumberFormatException: < INTEGER > matches must parse as int in Java! 
+ nArity = Integer.parseInt(arity.image); addDataSource(predicateName, nArity, dataSource); } } -DataSource dataSource() throws PrefixDeclarationException: -{ +DataSource dataSource() throws PrefixDeclarationException : { Token sourceName; List< String > arguments; -} -{ - (sourceName = < ARGUMENT_NAME > | sourceName = < VARORPREDNAME >) < LPAREN > arguments = Arguments() < RPAREN > - { +} { + ( sourceName = < ARGUMENT_NAME > + | sourceName = < VARORPREDNAME > ) < LPAREN > arguments = Arguments() < RPAREN > { try { return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); } catch (ParsingException e) { @@ -149,27 +145,19 @@ DataSource dataSource() throws PrefixDeclarationException: } } -void statement() throws PrefixDeclarationException: -{ +void statement() throws PrefixDeclarationException : { Statement statement; resetVariableSets(); -} -{ - LOOKAHEAD(rule()) statement = rule() { knowledgeBase.addStatement(statement);} -| statement = fact(FormulaContext.HEAD) //not from a rule - { - knowledgeBase.addStatement(statement); - } +} { + ( LOOKAHEAD(rule()) statement = rule() + | statement = fact(FormulaContext.HEAD) ) { knowledgeBase.addStatement(statement); } } -Rule rule() throws PrefixDeclarationException: -{ +Rule rule() throws PrefixDeclarationException : { List < PositiveLiteral > head; List < Literal > body; -} -{ - head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > - { +} { + head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > { // check that the intersection between headExiVars and BodyVars is empty for (String variable : headExiVars) { if (bodyVars.contains(variable)) @@ -186,57 +174,49 @@ Rule rule() throws PrefixDeclarationException: } } -List < PositiveLiteral > listOfPositiveLiterals(FormulaContext context) throws PrefixDeclarationException: -{ +List < PositiveLiteral > listOfPositiveLiterals(FormulaContext context) throws PrefixDeclarationException : { PositiveLiteral l; List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); -} -{ - l = positiveLiteral(context) { list.add(l); } - ( < COMMA > l = positiveLiteral(context) { list.add(l); } )* - { return list; } +} { + l = positiveLiteral(context) { list.add(l); } ( < COMMA > l = positiveLiteral(context) { list.add(l); } )* { + return list; + } } -List < Literal > listOfLiterals(FormulaContext context) throws PrefixDeclarationException: -{ +List < Literal > listOfLiterals(FormulaContext context) throws PrefixDeclarationException : { Literal l; List < Literal > list = new ArrayList < Literal > (); -} -{ - l = literal(context) { list.add(l); } - ( < COMMA > l = literal(context) { list.add(l); } )* - { return list; } +} { + l = literal(context) { list.add(l); } ( < COMMA > l = literal(context) { list.add(l); } )* { + return list; + } } -Literal literal(FormulaContext context) throws PrefixDeclarationException: -{ - Literal l = null; -} -{ - l = positiveLiteral(context) { return l; } -| l = negativeLiteral(context) { return l; } +Literal literal(FormulaContext context) throws PrefixDeclarationException : { + Literal l; +} { + ( l = positiveLiteral(context) + | l = negativeLiteral(context) ) { + return l; + } } -PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclarationException: -{ +PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclarationException : { Token t; List < Term > terms; 
String predicateName; -} -{ - predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > - { return Expressions.makePositiveLiteral(predicateName, terms); } +} { + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { + return Expressions.makePositiveLiteral(predicateName, terms); + } } -Fact fact(FormulaContext context) throws PrefixDeclarationException: -{ +Fact fact(FormulaContext context) throws PrefixDeclarationException : { Token t; List < Term > terms; String predicateName; -} -{ - predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > < DOT > - { +} { + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > < DOT > { try { return Expressions.makeFact(predicateName, terms); } catch (IllegalArgumentException e) { @@ -245,25 +225,22 @@ Fact fact(FormulaContext context) throws PrefixDeclarationException: } } -NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException: -{ +NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException : { List < Term > terms; String predicateName; -} -{ - < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > - { return Expressions.makeNegativeLiteral(predicateName, terms); } +} { + < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { + return Expressions.makeNegativeLiteral(predicateName, terms); + } } -List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationException: -{ +List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationException : { Term t; List < Term > list = new ArrayList < Term > (); -} -{ - t = term(context) { list.add(t); } - ( < COMMA > t = term(context) { list.add(t); } )* - { return list; } +} { + t = term(context) { list.add(t); } ( < COMMA > t = term(context) { list.add(t); } )* { + return list; + } } String predicateName() throws PrefixDeclarationException : { @@ -274,18 +251,16 @@ String predicateName() throws PrefixDeclarationException : { | t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } } -Term term(FormulaContext context) throws PrefixDeclarationException: -{ +Term term(FormulaContext context) throws PrefixDeclarationException : { Token t; String s; Constant c; -} -{ - s = IRI(false) { return createConstant(s); } -| c = NumericLiteral() { return c; } -| c = RDFLiteral() { return c; } -| t = < UNIVAR > - { +} { + s = absoluteIri() { return createConstant(s); } + | t = < VARORPREDNAME > { return createConstant(t.image); } + | c = NumericLiteral() { return c; } + | c = RDFLiteral() { return c; } + | t = < UNIVAR > { s = t.image.substring(1); if (context == FormulaContext.HEAD) headUniVars.add(s); @@ -293,8 +268,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException: bodyVars.add(s); return Expressions.makeUniversalVariable(s); } -| t = < EXIVAR > - { + | t = < EXIVAR > { s = t.image.substring(1); if (context == FormulaContext.HEAD) headExiVars.add(s); @@ -302,158 +276,96 @@ Term term(FormulaContext context) throws PrefixDeclarationException: throw new ParseException("Existentialy quantified variables can not appear in the body. 
Line: " + t.beginLine + ", Column: "+ t.beginColumn); return Expressions.makeExistentialVariable(s); } -| t = < VARORPREDNAME > { return createConstant(t.image); } -// | try { -// c = ConfigurableLiteral () { return c; } -// } catch (ParsingException e) { -// throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); -// } + | try { + c = ConfigurableLiteral () { return c; } + } catch (ParsingException e) { + throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); + } } -/** [16] */ -Constant NumericLiteral() : -{ +Constant NumericLiteral() : { Token t; -} -{ +} { t = < INTEGER > { return createConstant(t.image, PrefixDeclarations.XSD_INTEGER); } -| t = < DECIMAL > { return createConstant(t.image, PrefixDeclarations.XSD_DECIMAL); } -| t = < DOUBLE > { return createConstant(t.image, PrefixDeclarations.XSD_DOUBLE); } + | t = < DECIMAL > { return createConstant(t.image, PrefixDeclarations.XSD_DECIMAL); } + | t = < DOUBLE > { return createConstant(t.image, PrefixDeclarations.XSD_DOUBLE); } } -Constant RDFLiteral() throws PrefixDeclarationException: -{ - String lex = null; - String lang = null; // Optional lang tag and datatype. +Constant RDFLiteral() throws PrefixDeclarationException : { + String lex; + Token lang = null; // Optional lang tag and datatype. String dt = null; -} -{ - lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI(false) )? - { return createConstant(lex, lang, dt); } -} - -// Constant ConfigurableLiteral() throws ParsingException: -// { -// Token t; -// } -// { -// ( LOOKAHEAD( < PIPE_DELIMINATED_LITERAL >, -// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) -// t = < PIPE_DELIMINATED_LITERAL > { -// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, -// stripDelimiters(t.image, 1), -// getSubParserFactory()); -// } -// | LOOKAHEAD( < HASH_DELIMINATED_LITERAL >, -// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) -// t = < HASH_DELIMINATED_LITERAL > { -// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, -// stripDelimiters(t.image, 1), -// getSubParserFactory()); -// } -// // | LOOKAHEAD( < BRACKET_DELIMINATED_LITERAL >, -// // { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) -// // t = < BRACKET_DELIMINATED_LITERAL > { -// // return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, -// // stripDelimiters(t.image, 1), -// // getSubParserFactory()); -// // } -// | LOOKAHEAD( < BRACE_DELIMINATED_LITERAL >, -// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) -// t = < BRACE_DELIMINATED_LITERAL > { -// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, -// stripDelimiters(t.image, 1), -// getSubParserFactory()); -// } -// ) -// } - -String Langtag() : -{ - Token t; -} -{ - // Enumerate the directives here because they look like language tags. - ( - t = < LANGTAG > - ) - { - String lang = stripChars(t.image, 1); - return lang; - } +} { + lex = String() ( lang = < LANGTAG > | < DATATYPE > dt = absoluteIri() )? 
{ + if (lang != null) { + return Expressions.makeLanguageStringConstant(lex, lang.image); + } + return createConstant(lex, dt); + } } -String String(): -{ +Constant ConfigurableLiteral() throws ParsingException : { + Token t; +} { + ( LOOKAHEAD( < PIPE_DELIMITED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) + t = < PIPE_DELIMITED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, t.image, + getSubParserFactory()); + } + | LOOKAHEAD( < HASH_DELIMITED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) + t = < HASH_DELIMITED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, t.image, + getSubParserFactory()); + } + | LOOKAHEAD( < PAREN_DELIMITED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) + t = < PAREN_DELIMITED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, t.image, + getSubParserFactory()); + } + | LOOKAHEAD( < BRACE_DELIMITED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) + t = < BRACE_DELIMITED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, t.image, + getSubParserFactory()); + } + | LOOKAHEAD( < BRACKET_DELIMITED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) + t = < BRACKET_DELIMITED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, t.image, + getSubParserFactory()); + } + + ) +} + +String String() : { Token t; - String lex; -} -{ - ( t = < SINGLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 1); } - | t = < DOUBLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 1); } - | t = < TRIPLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 3); } - | t = < SIXFOLD_QUOTED_STRING > { lex = stripDelimiters(t.image, 3); } - ) - { - lex = unescapeStr(lex, t.beginLine, t.beginColumn); - return lex; - } +} { + ( t = < SINGLE_QUOTED_STRING > + | t = < DOUBLE_QUOTED_STRING > + | t = < TRIPLE_QUOTED_STRING > + | t = < SIXFOLD_QUOTED_STRING > + ) { return unescapeStr(t.image, t.beginLine, t.beginColumn); } } -LinkedList< String > Arguments() throws PrefixDeclarationException: -{ +LinkedList< String > Arguments() throws PrefixDeclarationException : { String str; LinkedList< String > rest = new LinkedList< String >(); -} -{ - (str = String() | str = quotedIri()) [< COMMA > rest = Arguments()] - { +} { + ( str = String() + | str = quotedIri()) [< COMMA > rest = Arguments()] { rest.addFirst(str); return rest; } } -String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: -{ - String iri; -} -{ - ( - iri = IRIREF() - | iri = PrefixedName() - ) - { - String result = prefixDeclarations.absolutize(iri); - if (includeAngleBrackets) { - result = "<"+result+">"; - } - return result; - } -} - -String PrefixedName() throws PrefixDeclarationException: -{ - Token t; -} -{ - //( - t = < PNAME_LN > - //| t = < PNAME_NS > - //) - { return prefixDeclarations.resolvePrefixedName(t.image);} - //{ return prefixDeclarations.resolvePrefixedName(t.image, t.beginLine, t.beginColumn);} -} - -String IRIREF() : -{ +String PrefixedName() throws PrefixDeclarationException : { Token t; -} -{ - t = < IRI > - { - // we remove '<' and '>' - return 
stripDelimiters(t.image, 1); - } +} { + t = < PNAME_LN > { return prefixDeclarations.resolvePrefixedName(t.image); } } // ------------------------------------------ @@ -468,7 +380,6 @@ String IRIREF() : < COMMENT : "%" ( ~[ "\n" ] )* "\n" > } -// ------------------------------------------ MORE : { "@": DIRECTIVE } @@ -498,14 +409,17 @@ MORE : { | < #PN_LOCAL : ( < PN_CHARS_U > | [ ":", "0"-"9" ] ) ( ( < PN_CHARS > | [ ".", ":" ] )* < PN_CHARS > )? > | < COMMA : "," > - | < LPAREN : "(" > { + | < RPAREN : ")" > { popState(); } +} + +< DEFAULT, BODY , DIRECTIVE_ARGUMENTS > TOKEN : { + < LPAREN : "(" > { pushState(); if (curLexState == DEFAULT || curLexState == BODY) { SwitchTo(TERM); } } - | < RPAREN : ")" > { popState(); } } < TERM, DIRECTIVE_ARGUMENTS > TOKEN : { @@ -559,9 +473,10 @@ TOKEN : { < TERM > TOKEN : { < UNIVAR : "?" < VARORPREDNAME > > | < EXIVAR : "!" < VARORPREDNAME > > - | < LANGTAG : "@" ( < A2Z > )+ ( "-" ( < A2ZN > )+ )? > + | < LANGTAG : "@" ( < A2Z > )+ ( "-" ( < A2ZN > )+ )? > { + matchedToken.image = JavaCCParserBase.stripChars(matchedToken.image, 1); + } | < DATATYPE : "^^" > - | < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > } < TERM, DIRECTIVE_ARGUMENTS > MORE : { @@ -569,40 +484,84 @@ TOKEN : { | < "\"" > { pushState(); } : DOUBLE_QUOTED | < "'''" > { pushState(); }: TRIPLE_QUOTED | < "\"\"\"" > { pushState(); } : SIXFOLD_QUOTED + | < "|" > { pushState(); } : PIPE_DELIMITED + | < "#" > { pushState(); } : HASH_DELIMITED + | < < LPAREN > > { pushState(); } : PAREN_DELIMITED + | < "{" > { pushState(); } : BRACE_DELIMITED + | < "[" > { pushState(); } : BRACKET_DELIMITED +} + +< PIPE_DELIMITED > TOKEN : { + < PIPE_DELIMITED_LITERAL : ( ~ [ "|" ] )* "|" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< HASH_DELIMITED > TOKEN : { + < HASH_DELIMITED_LITERAL : ( ~ [ "#" ] )* "#" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< PAREN_DELIMITED > TOKEN : { + < PAREN_DELIMITED_LITERAL : ( ~ [ ")" ] )* ")" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< BRACE_DELIMITED > TOKEN : { + < BRACE_DELIMITED_LITERAL : ( ( ~ [ "}" ] | [ "}" ] ) )* "}" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< BRACKET_DELIMITED > TOKEN : { + < BRACKET_DELIMITED_LITERAL : ( ( ~ [ "]" ] | [ ")" ] ) )* "]" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } } < SINGLE_QUOTED > TOKEN : { < SINGLE_QUOTED_STRING : ( ~[ "'", "\\", "\n", "\r" ] - | < ESCAPE_SEQUENCE > )* "'" > { popState(); } + | < ESCAPE_SEQUENCE > )* "'" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } } < DOUBLE_QUOTED > TOKEN : { < DOUBLE_QUOTED_STRING : ( ~[ "\"", "\\", "\n", "\r" ] - | < ESCAPE_SEQUENCE > )* "\"" > { popState(); } + | < ESCAPE_SEQUENCE > )* "\"" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } } < TRIPLE_QUOTED > TOKEN : { < TRIPLE_QUOTED_STRING : ( ~[ "'", "\\" ] | < ESCAPE_SEQUENCE > | ( "'" ~[ "'" ] ) - | ( "''" ~[ "'" ] ) )* "'''" > { popState(); } + | ( "''" ~[ "'" ] ) )* "'''" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 3); + } } < SIXFOLD_QUOTED > TOKEN : { < SIXFOLD_QUOTED_STRING : ( ~[ "\"", "\\" ] | < ESCAPE_SEQUENCE > | 
( "\"" ~[ "\"" ] ) - | ( "\"\"" ~[ "\"" ] ) )* "\"\"\"" > { popState(); } + | ( "\"\"" ~[ "\"" ] ) )* "\"\"\"" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 3); + } } < SINGLE_QUOTED, DOUBLE_QUOTED, TRIPLE_QUOTED, SIXFOLD_QUOTED > MORE : { < ESCAPE_SEQUENCE : "\\" [ "t", "b", "n", "r", "f", "\\", "\"", "'" ] > } - -// TOKEN : -// { -// < PIPE_DELIMINATED_LITERAL : ( "|" (~[ "|" ])* "|" ) > : DEFAULT -// | < HASH_DELIMINATED_LITERAL : ( "#" (~[ "#" ])* "#" ) > : DEFAULT -// // | < BRACKET_DELIMINATED_LITERAL : ( "[" (~[ "]" ])* "]" ) > : DEFAULT -// | < BRACE_DELIMINATED_LITERAL : ( "{" (~[ "}" ])* "}" ) > : DEFAULT -// } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index f55801423..436c49f8f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -109,13 +109,17 @@ public enum ConfigurableLiteralDelimiter { */ HASH, /** - * Literals of the form {@code […]} + * Literals of the form {@code (…)} */ - BRACKET, + PAREN, /** * Literals of the form {@code {…}} */ BRACE, + /** + * Literals of the form {@code […]} + */ + BRACKET, } public JavaCCParserBase() { @@ -134,21 +138,16 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { return Expressions.makeAbstractConstant(absoluteIri); } - Constant createConstant(String lexicalForm, String datatype) throws ParseException { - return createConstant(lexicalForm, null, datatype); - } - /** * Creates a suitable {@link Constant} from the parsed data. * * @param string the string data (unescaped) - * @param languageTag the language tag, or null if not present * @param datatype the datatype, or null if not provided * @return suitable constant */ - Constant createConstant(String lexicalForm, String languageTag, String datatype) throws ParseException { + Constant createConstant(String lexicalForm, String datatype) throws ParseException { try { - return parserConfiguration.parseConstant(lexicalForm, languageTag, datatype); + return parserConfiguration.parseDatatypeConstant(lexicalForm, datatype); } catch (ParsingException e) { throw makeParseExceptionWithCause("Failed to parse Constant", e); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java index d509fe7f4..f89c5f012 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java @@ -63,11 +63,4 @@ public void registerDatatype_dataSourceName_succeeds() { parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler).registerDatatype(SOURCE_NAME, datatypeConstantHandler); } - - @Test(expected = IllegalArgumentException.class) - public void parseConstant_languageTagWithExplictDatatype_throws() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.parseConstant("test", "test", "test"); - } - } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java index ff6de9d2e..fa58b9d5a 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -44,17 +44,20 @@ public class RuleParserConfigurableLiteralTest { public static final Constant pipeConstant = Expressions.makeAbstractConstant("testPipe"); public static final Constant hashConstant = Expressions.makeAbstractConstant("testHash"); - public static final Constant bracketConstant = Expressions.makeAbstractConstant("testBracket"); + public static final Constant parenConstant = Expressions.makeAbstractConstant("testParen"); public static final Constant braceConstant = Expressions.makeAbstractConstant("testBrace"); + public static final Constant bracketConstant = Expressions.makeAbstractConstant("testBracket"); public static final ConfigurableLiteralHandler pipeHandler = getMockLiteralHandler( ConfigurableLiteralDelimiter.PIPE, pipeConstant); public static final ConfigurableLiteralHandler hashHandler = getMockLiteralHandler( ConfigurableLiteralDelimiter.HASH, hashConstant); - public static final ConfigurableLiteralHandler bracketHandler = getMockLiteralHandler( - ConfigurableLiteralDelimiter.BRACKET, bracketConstant); + public static final ConfigurableLiteralHandler parenHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.PAREN, parenConstant); public static final ConfigurableLiteralHandler braceHandler = getMockLiteralHandler( ConfigurableLiteralDelimiter.BRACE, braceConstant); + public static final ConfigurableLiteralHandler bracketHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.BRACKET, bracketConstant); @Test(expected = ParsingException.class) public void testNoDefaultPipeLiteral() throws ParsingException { @@ -93,12 +96,11 @@ public void testCustomHashLiteral() throws ParsingException { } @Test - @Ignore - public void testCustomBracketLiteral() throws ParsingException { + public void testCustomParenLiteral() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); - Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); - assertEquals(bracketConstant, result.getConstants().toArray()[0]); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler); + Literal result = RuleParser.parseLiteral("p((test))", parserConfiguration); + assertEquals(parenConstant, result.getConstants().toArray()[0]); } @Test @@ -110,14 +112,21 @@ public void testCustomBraceLiteral() throws ParsingException { } @Test - @Ignore + public void testCustomBracketLiteral() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); + Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); + assertEquals(bracketConstant, result.getConstants().toArray()[0]); + } + + @Test public void testMixedCustomLiterals() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, 
bracketHandler) .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); - Literal result = RuleParser.parseLiteral("p(||, #test#, [[], {})", parserConfiguration); + Literal result = RuleParser.parseLiteral("p(||, #test#, [], {})", parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( Arrays.asList(pipeConstant, hashConstant, bracketConstant, braceConstant)); @@ -135,6 +144,32 @@ public void testNonTrivialCustomPipeLiteral() throws ParsingException { assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } + @Test + public void testNestedBraceLiteral() throws ParsingException { + String label = "this is a test, do not worry."; + String input = "p({{" + label + "}})"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant("{" + label + "}"), result.getConstants().toArray()[0]); + } + + @Test + public void testMixedAndNestedCustomLiterals() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) + .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) + .registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); + Literal result = RuleParser.parseLiteral("p(|{}|, #test#, [|test, #test#, test|], ([], {}, [{[{}]}]))", parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(pipeConstant, hashConstant, bracketConstant, parenConstant)); + assertEquals(expected, constants); + } + static Constant makeReversedConstant(String name) { StringBuilder builder = new StringBuilder(name); return Expressions.makeAbstractConstant(builder.reverse().toString()); From 5964d523e557116709046df32e5a51b53b3c97c9 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 29 Jan 2020 17:38:36 +0100 Subject: [PATCH 0522/1003] Parser: Separate JavaCCParserBase internals from parsing code --- .../vlog4j/parser/javacc/JavaCCParser.jj | 53 ++++++++---------- .../parser/javacc/JavaCCParserBase.java | 55 +++++++++++++++++-- .../parser/javacc/SubParserFactory.java | 12 ++-- 3 files changed, 79 insertions(+), 41 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 810cd61d1..68c5a8b7e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -90,7 +90,7 @@ void base() throws PrefixDeclarationException : { Token iri; } { < BASE > iri = < IRI_ABSOLUTE > < DOT > { - prefixDeclarations.setBase(iri.image); + setBase(iri.image); } } @@ -99,15 +99,15 @@ void prefix() throws PrefixDeclarationException : { String iri; } { < PREFIX > pn = < PNAME_NS > iri = absoluteIri() < DOT > { - prefixDeclarations.setPrefix(pn.image, iri); + setPrefix(pn.image, iri); } } String absoluteIri() throws 
PrefixDeclarationException : { Token iri; } { - iri = < IRI_ABSOLUTE > { return prefixDeclarations.absolutize(iri.image); } - | iri = < PNAME_LN > { return prefixDeclarations.resolvePrefixedName(iri.image); } + iri = < IRI_ABSOLUTE > { return absolutizeIri(iri.image); } + | iri = < PNAME_LN > { return resolvePrefixedName(iri.image); } } String quotedIri() throws PrefixDeclarationException : { @@ -137,11 +137,7 @@ DataSource dataSource() throws PrefixDeclarationException : { } { ( sourceName = < ARGUMENT_NAME > | sourceName = < VARORPREDNAME > ) < LPAREN > arguments = Arguments() < RPAREN > { - try { - return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); - } catch (ParsingException e) { - throw makeParseExceptionWithCause("Failed while trying to parse the source-specific part of a data source declaration", e); - } + return parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); } } @@ -150,7 +146,9 @@ void statement() throws PrefixDeclarationException : { resetVariableSets(); } { ( LOOKAHEAD(rule()) statement = rule() - | statement = fact(FormulaContext.HEAD) ) { knowledgeBase.addStatement(statement); } + | statement = fact(FormulaContext.HEAD) ) { + addStatement(statement); + } } Rule rule() throws PrefixDeclarationException : { @@ -248,13 +246,14 @@ String predicateName() throws PrefixDeclarationException : { String s; } { s = absoluteIri() { return s; } - | t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } + | t = < VARORPREDNAME > { return absolutizeIri(t.image); } } Term term(FormulaContext context) throws PrefixDeclarationException : { Token t; String s; Constant c; + Term tt; } { s = absoluteIri() { return createConstant(s); } | t = < VARORPREDNAME > { return createConstant(t.image); } @@ -277,7 +276,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { return Expressions.makeExistentialVariable(s); } | try { - c = ConfigurableLiteral () { return c; } + tt = ConfigurableLiteral () { return tt; } } catch (ParsingException e) { throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); } @@ -304,40 +303,34 @@ Constant RDFLiteral() throws PrefixDeclarationException : { } } -Constant ConfigurableLiteral() throws ParsingException : { +Term ConfigurableLiteral() throws ParsingException : { Token t; } { ( LOOKAHEAD( < PIPE_DELIMITED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) t = < PIPE_DELIMITED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, t.image, - getSubParserFactory()); + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, t.image, getSubParserFactory()); } | LOOKAHEAD( < HASH_DELIMITED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) t = < HASH_DELIMITED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, t.image, - getSubParserFactory()); + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, t.image, getSubParserFactory()); } | LOOKAHEAD( < PAREN_DELIMITED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) + { 
isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) t = < PAREN_DELIMITED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, t.image, - getSubParserFactory()); + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, t.image, getSubParserFactory()); } | LOOKAHEAD( < BRACE_DELIMITED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) t = < BRACE_DELIMITED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, t.image, - getSubParserFactory()); + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, t.image, getSubParserFactory()); } | LOOKAHEAD( < BRACKET_DELIMITED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) t = < BRACKET_DELIMITED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, t.image, - getSubParserFactory()); + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, t.image, getSubParserFactory()); } - ) } @@ -365,7 +358,7 @@ LinkedList< String > Arguments() throws PrefixDeclarationException : { String PrefixedName() throws PrefixDeclarationException : { Token t; } { - t = < PNAME_LN > { return prefixDeclarations.resolvePrefixedName(t.image); } + t = < PNAME_LN > { return resolvePrefixedName(t.image); } } // ------------------------------------------ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 436c49f8f..273c9f403 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -21,6 +21,7 @@ */ import java.util.HashSet; +import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; @@ -28,6 +29,8 @@ import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -52,10 +55,10 @@ * */ public class JavaCCParserBase { - protected PrefixDeclarations prefixDeclarations; + private PrefixDeclarations prefixDeclarations; - protected KnowledgeBase knowledgeBase; - protected ParserConfiguration parserConfiguration; + private KnowledgeBase knowledgeBase; + private ParserConfiguration parserConfiguration; /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -153,6 +156,10 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } } + void addStatement(Statement statement) { + knowledgeBase.addStatement(statement); + } + void addDataSource(String predicateName, int arity, DataSource dataSource) throws ParseException { if (dataSource.getRequiredArity().isPresent()) { Integer requiredArity = dataSource.getRequiredArity().get(); @@ -163,7 +170,7 @@ void addDataSource(String predicateName, int arity, DataSource dataSource) throw } Predicate predicate = Expressions.makePredicate(predicateName, arity); - knowledgeBase.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } static String unescapeStr(String s, int line, int column) throws ParseException { @@ -296,11 +303,47 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } - protected void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { + void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { this.prefixDeclarations = prefixDeclarations; } - protected PrefixDeclarations getPrefixDeclarations() { + PrefixDeclarations getPrefixDeclarations() { return prefixDeclarations; } + + DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String syntacticForm, + List arguments, SubParserFactory subParserFactory) throws ParseException { + try { + return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(syntacticForm, arguments, + subParserFactory); + } catch (ParsingException e) { + throw makeParseExceptionWithCause( + "Failed while trying to parse the source-specific part of a data source declaration", e); + } + } + + Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, + SubParserFactory subParserFactory) throws ParsingException { + return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); + } + + boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter delimiter) { + return parserConfiguration.isConfigurableLiteralRegistered(delimiter); + } + + void setBase(String baseIri) throws PrefixDeclarationException { + prefixDeclarations.setBase(baseIri); + } + + void setPrefix(String prefixName, String baseIri) throws PrefixDeclarationException { + prefixDeclarations.setPrefix(prefixName, baseIri); + } + + String absolutizeIri(String iri) throws PrefixDeclarationException { + return prefixDeclarations.absolutize(iri); + } + + String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { + return prefixDeclarations.resolvePrefixedName(prefixedName); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index c4a012baf..c92c4be83 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -30,9 +30,10 @@ import org.semanticweb.vlog4j.parser.RuleParser; /** - * Factory for creating a SubParser sharing configuration, state, and prefixes, - * but with an independent input stream, to be used, e.g., for parsing arguments - * in data source declarations. 
+ * Factory for creating a SubParser sharing configuration, (semantic) + * state, and prefixes, but with an independent input stream, to be + * used, e.g., for parsing arguments in data source declarations. The + * parser will start in the {@code DEFAULT} lexical state. * * @author Maximilian Marx */ @@ -44,7 +45,8 @@ public class SubParserFactory { /** * Construct a SubParserFactory. * - * @param parser the parser instance to get the state from. + * @param parser the parser instance to get the (semantic) state + * from. */ SubParserFactory(final JavaCCParser parser) { this.knowledgeBase = parser.getKnowledgeBase(); @@ -53,7 +55,7 @@ public class SubParserFactory { } /** - * Create a new parser with the specified state and given input. + * Create a new parser with the specified (semantic) state and given input. * * @param inputStream the input stream to parse. * @param encoding encoding of the input stream. From c150f28bdeb8b060f216179a6d786f8a7a2f63b5 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 29 Jan 2020 20:48:21 +0100 Subject: [PATCH 0523/1003] Parser: Fix handling of nested configurable literals --- .../vlog4j/parser/javacc/JavaCCParser.jj | 11 +- .../RuleParserConfigurableLiteralTest.java | 102 ++++++++++++++++++ 2 files changed, 109 insertions(+), 4 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 68c5a8b7e..f9328127b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -7,7 +7,7 @@ options STATIC = false; // DEBUG_PARSER = true; - // DEBUG_TOKEN_MANAGER = true ; + // DEBUG_TOKEN_MANAGER = true; } PARSER_BEGIN(JavaCCParser) @@ -499,24 +499,27 @@ TOKEN : { } < PAREN_DELIMITED > TOKEN : { - < PAREN_DELIMITED_LITERAL : ( ~ [ ")" ] )* ")" > { + < PAREN_DELIMITED_LITERAL : ( < UNPAREN > ( "(" < UNPAREN > ")" )* )* < UNPAREN > ")" > { popState(); matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); } + | < #UNPAREN : ( ~ [ "(", ")" ] )* > } < BRACE_DELIMITED > TOKEN : { - < BRACE_DELIMITED_LITERAL : ( ( ~ [ "}" ] | [ "}" ] ) )* "}" > { + < BRACE_DELIMITED_LITERAL : ( < UNBRACE > ( "{" < UNBRACE > "}" )* )* < UNBRACE > "}" > { popState(); matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); } + | < #UNBRACE : (~ [ "{", "}" ] )* > } < BRACKET_DELIMITED > TOKEN : { - < BRACKET_DELIMITED_LITERAL : ( ( ~ [ "]" ] | [ ")" ] ) )* "]" > { + < BRACKET_DELIMITED_LITERAL : ( < UNBRACKET > ( "[" < UNBRACKET > "]" )* )* < UNBRACKET > "]" > { popState(); matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); } + | < #UNBRACKET : ( ~ [ "[", "]" ] )* > } < SINGLE_QUOTED > TOKEN : { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java index fa58b9d5a..65490a2ae 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -144,6 +144,43 @@ public void testNonTrivialCustomPipeLiteral() throws ParsingException { assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } + @Test + public void 
testNestedParenLiteral() throws ParsingException { + String label = "this is a test, do not worry."; + String input = "p(((" + label + ")))"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant("(" + label + ")"), result.getConstants().toArray()[0]); + } + + @Test + public void testMultipleParenLiterals() throws ParsingException { + String input = "p((test), (tset))"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("test"), makeReversedConstant("tset"))); + assertEquals(expected, constants); + } + + @Test + public void testMultipleNestedParenLiterals() throws ParsingException { + String input = "p(((test)), ((tset)))"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("(test)"), makeReversedConstant("(tset)"))); + assertEquals(expected, constants); + } + @Test public void testNestedBraceLiteral() throws ParsingException { String label = "this is a test, do not worry."; @@ -155,6 +192,71 @@ public void testNestedBraceLiteral() throws ParsingException { assertEquals(makeReversedConstant("{" + label + "}"), result.getConstants().toArray()[0]); } + @Test + public void testMultipleBraceLiterals() throws ParsingException { + String input = "p({test}, {tset})"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("test"), makeReversedConstant("tset"))); + assertEquals(expected, constants); + } + + @Test + public void testMultipleNestedBraceLiterals() throws ParsingException { + String input = "p({{test}}, {{tset}})"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("{test}"), makeReversedConstant("{tset}"))); + assertEquals(expected, constants); + } + + @Test + public void 
testNestedBracketLiteral() throws ParsingException { + String label = "this is a test, do not worry."; + String input = "p([[" + label + "]])"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant("[" + label + "]"), result.getConstants().toArray()[0]); + } + + @Test + public void testMultipleBracketLiterals() throws ParsingException { + String input = "p([test], [tset])"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("test"), makeReversedConstant("tset"))); + assertEquals(expected, constants); + } + + @Test + public void testMultipleNestedBracketLiterals() throws ParsingException { + String input = "p([[test]], [[tset]])"; + + + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset]"))); + assertEquals(expected, constants); + } + @Test public void testMixedAndNestedCustomLiterals() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); From f81afd6b36d5dd97bd5f0a2193b3ca59a8f32269 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 30 Jan 2020 18:31:17 +0100 Subject: [PATCH 0524/1003] Parser: Fix handling of nested configurable literals, really --- .../vlog4j/parser/javacc/JavaCCParser.jj | 116 +++++++++++++----- .../RuleParserConfigurableLiteralTest.java | 19 ++- 2 files changed, 91 insertions(+), 44 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index f9328127b..95d68ed9f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -7,6 +7,7 @@ options STATIC = false; // DEBUG_PARSER = true; + // DEBUG_LOOKAHEAD = true; // DEBUG_TOKEN_MANAGER = true; } @@ -45,7 +46,6 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultData import org.semanticweb.vlog4j.core.model.implementation.Expressions; - public class JavaCCParser extends JavaCCParserBase { private SubParserFactory getSubParserFactory() { @@ -68,7 +68,7 @@ TOKEN_MGR_DECLS : { Deque states; void pushState() { - states.push(curLexState); + states.push(curLexState); } void popState() { @@ -76,7 +76,6 @@ TOKEN_MGR_DECLS : { } } - void parse() throws PrefixDeclarationException : { } { ( base() )? 
@@ -304,6 +303,7 @@ Constant RDFLiteral() throws PrefixDeclarationException : { } Term ConfigurableLiteral() throws ParsingException : { + String s; Token t; } { ( LOOKAHEAD( < PIPE_DELIMITED_LITERAL >, @@ -316,24 +316,75 @@ Term ConfigurableLiteral() throws ParsingException : { t = < HASH_DELIMITED_LITERAL > { return parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, t.image, getSubParserFactory()); } - | LOOKAHEAD( < PAREN_DELIMITED_LITERAL >, - { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) - t = < PAREN_DELIMITED_LITERAL > { - return parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, t.image, getSubParserFactory()); + | LOOKAHEAD( < LPAREN >, + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) + s = parenDelimitedLiteral() { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, s, getSubParserFactory()); } - | LOOKAHEAD( < BRACE_DELIMITED_LITERAL >, + | LOOKAHEAD( < LBRACE >, { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) - t = < BRACE_DELIMITED_LITERAL > { - return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, t.image, getSubParserFactory()); + s = braceDelimitedLiteral() { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, s, getSubParserFactory()); } - | LOOKAHEAD( < BRACKET_DELIMITED_LITERAL >, + | LOOKAHEAD( < LBRACKET >, { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) - t = < BRACKET_DELIMITED_LITERAL > { - return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, t.image, getSubParserFactory()); + s = bracketDelimitedLiteral() { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, s, getSubParserFactory()); } ) } +String parenDelimitedLiteral() : { + String s; + StringBuilder sb = new StringBuilder(); +} { + < LPAREN > ( s = parenDelimitedLiteralBody() { sb.append(s); } )* < RPAREN > { + return sb.toString(); + } +} + +String parenDelimitedLiteralBody() : { + Token t; + String s; +} { + t = < UNPAREN > { return t.image; } + | ( < LPAREN > s = parenDelimitedLiteralBody() < RPAREN > ) { return "(" + s + ")"; } +} + +String braceDelimitedLiteral() : { + String s; + StringBuilder sb = new StringBuilder(); +} { + < LBRACE > ( s = braceDelimitedLiteralBody() { sb.append(s); } )* < RBRACE > { + return sb.toString(); + } +} + +String braceDelimitedLiteralBody() : { + Token t; + String s; +} { + t = < UNBRACE > { return t.image; } + | ( < LBRACE > s = braceDelimitedLiteralBody() < RBRACE > ) { return "{" + s + "}"; } +} + +String bracketDelimitedLiteral() : { + String s; + StringBuilder sb = new StringBuilder(); +} { + < LBRACKET > ( s = bracketDelimitedLiteralBody() { sb.append(s); } )* < RBRACKET > { + return sb.toString(); + } +} + +String bracketDelimitedLiteralBody() : { + Token t; + String s; +} { + t = < UNBRACKET > { return t.image; } + | ( < LBRACKET > s = bracketDelimitedLiteralBody() < RBRACKET > ) { return "[" + s + "]"; } +} + String String() : { Token t; } { @@ -402,17 +453,19 @@ MORE : { | < #PN_LOCAL : ( < PN_CHARS_U > | [ ":", "0"-"9" ] ) ( ( < PN_CHARS > | [ ".", ":" ] )* < PN_CHARS > )? 
> | < COMMA : "," > - | < RPAREN : ")" > { popState(); } } -< DEFAULT, BODY , DIRECTIVE_ARGUMENTS > TOKEN : { +< DEFAULT, BODY, TERM, DIRECTIVE_ARGUMENTS, PAREN_DELIMITED > TOKEN : { < LPAREN : "(" > { pushState(); - if (curLexState == DEFAULT || curLexState == BODY) { + if (curLexState == TERM) { + SwitchTo(PAREN_DELIMITED); + } else if (curLexState == DEFAULT || curLexState == BODY) { SwitchTo(TERM); } } + | < RPAREN : ")" > { popState(); } } < TERM, DIRECTIVE_ARGUMENTS > TOKEN : { @@ -479,9 +532,14 @@ TOKEN : { | < "\"\"\"" > { pushState(); } : SIXFOLD_QUOTED | < "|" > { pushState(); } : PIPE_DELIMITED | < "#" > { pushState(); } : HASH_DELIMITED - | < < LPAREN > > { pushState(); } : PAREN_DELIMITED - | < "{" > { pushState(); } : BRACE_DELIMITED - | < "[" > { pushState(); } : BRACKET_DELIMITED +} + +< TERM, BRACE_DELIMITED > TOKEN : { + < LBRACE : "{" > { pushState(); } : BRACE_DELIMITED +} + +< TERM, BRACKET_DELIMITED > TOKEN : { + < LBRACKET : "[" > { pushState(); } : BRACKET_DELIMITED } < PIPE_DELIMITED > TOKEN : { @@ -499,27 +557,17 @@ TOKEN : { } < PAREN_DELIMITED > TOKEN : { - < PAREN_DELIMITED_LITERAL : ( < UNPAREN > ( "(" < UNPAREN > ")" )* )* < UNPAREN > ")" > { - popState(); - matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); - } - | < #UNPAREN : ( ~ [ "(", ")" ] )* > + < UNPAREN : ( ~ [ "(", ")" ] )+ > } < BRACE_DELIMITED > TOKEN : { - < BRACE_DELIMITED_LITERAL : ( < UNBRACE > ( "{" < UNBRACE > "}" )* )* < UNBRACE > "}" > { - popState(); - matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); - } - | < #UNBRACE : (~ [ "{", "}" ] )* > + < RBRACE : "}" > { popState(); } + | < UNBRACE : (~ [ "{", "}" ] )+ > } < BRACKET_DELIMITED > TOKEN : { - < BRACKET_DELIMITED_LITERAL : ( < UNBRACKET > ( "[" < UNBRACKET > "]" )* )* < UNBRACKET > "]" > { - popState(); - matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); - } - | < #UNBRACKET : ( ~ [ "[", "]" ] )* > + < RBRACKET : "]" > { popState(); } + | < UNBRACKET : ( ~ [ "[", "]" ] )+ > } < SINGLE_QUOTED > TOKEN : { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java index 65490a2ae..51d9173f7 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -29,7 +29,6 @@ import java.util.stream.Collectors; import org.junit.Test; -import org.junit.Ignore; import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; @@ -146,13 +145,13 @@ public void testNonTrivialCustomPipeLiteral() throws ParsingException { @Test public void testNestedParenLiteral() throws ParsingException { - String label = "this is a test, do not worry."; - String input = "p(((" + label + ")))"; + String label = "(((this is a test, do not worry.)))"; + String input = "p((" + label + "))"; ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); Literal result = RuleParser.parseLiteral(input, parserConfiguration); - assertEquals(makeReversedConstant("(" + label + ")"), result.getConstants().toArray()[0]); 
+ assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test @@ -183,13 +182,13 @@ public void testMultipleNestedParenLiterals() throws ParsingException { @Test public void testNestedBraceLiteral() throws ParsingException { - String label = "this is a test, do not worry."; - String input = "p({{" + label + "}})"; + String label = "{{{this is a test, do not worry.}}}"; + String input = "p({" + label + "})"; ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); Literal result = RuleParser.parseLiteral(input, parserConfiguration); - assertEquals(makeReversedConstant("{" + label + "}"), result.getConstants().toArray()[0]); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test @@ -220,13 +219,13 @@ public void testMultipleNestedBraceLiterals() throws ParsingException { @Test public void testNestedBracketLiteral() throws ParsingException { - String label = "this is a test, do not worry."; - String input = "p([[" + label + "]])"; + String label = "[[[this is a test, do not worry.]]]"; + String input = "p([" + label + "])"; ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); Literal result = RuleParser.parseLiteral(input, parserConfiguration); - assertEquals(makeReversedConstant("[" + label + "]"), result.getConstants().toArray()[0]); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test From 912a68e1f828219e4cf8412780d13817d2759d73 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 30 Jan 2020 19:05:45 +0100 Subject: [PATCH 0525/1003] Parser: Extend & slightly refactor tests --- .../RuleParserConfigurableLiteralTest.java | 91 +++++++++---------- .../parser/RuleParserDataSourceTest.java | 2 +- .../vlog4j/syntax/parser/RuleParserTest.java | 6 +- 3 files changed, 49 insertions(+), 50 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java index 51d9173f7..09770733d 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -59,12 +59,12 @@ public class RuleParserConfigurableLiteralTest { ConfigurableLiteralDelimiter.BRACKET, bracketConstant); @Test(expected = ParsingException.class) - public void testNoDefaultPipeLiteral() throws ParsingException { + public void parseLiteral_unregisteredCustomLiteral_throws() throws ParsingException { RuleParser.parseLiteral("p(|test|)"); } @Test - public void testCustomLiteralRegistration() throws ParsingException { + public void registerLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); assertTrue("Configurable Literal Handler has been registered", @@ -72,14 +72,14 @@ public void testCustomLiteralRegistration() throws ParsingException { } @Test(expected = IllegalArgumentException.class) - public void 
testNoDuplicateCustomLiteralRegistration() throws ParsingException, IllegalArgumentException { + public void registerLiteral_duplicateHandler_throws() throws ParsingException, IllegalArgumentException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.PIPE, hashHandler); } @Test - public void testCustomPipeLiteral() throws ParsingException { + public void parseLiteral_customPipeLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); Literal result = RuleParser.parseLiteral("p(|test|)", parserConfiguration); @@ -87,7 +87,7 @@ public void testCustomPipeLiteral() throws ParsingException { } @Test - public void testCustomHashLiteral() throws ParsingException { + public void parseLiteral_customHashLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler); Literal result = RuleParser.parseLiteral("p(#test#)", parserConfiguration); @@ -95,7 +95,7 @@ public void testCustomHashLiteral() throws ParsingException { } @Test - public void testCustomParenLiteral() throws ParsingException { + public void parseLiteral_customParenLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler); Literal result = RuleParser.parseLiteral("p((test))", parserConfiguration); @@ -103,7 +103,7 @@ public void testCustomParenLiteral() throws ParsingException { } @Test - public void testCustomBraceLiteral() throws ParsingException { + public void parseLiteral_customBraceLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); Literal result = RuleParser.parseLiteral("p({test})", parserConfiguration); @@ -111,7 +111,7 @@ public void testCustomBraceLiteral() throws ParsingException { } @Test - public void testCustomBracketLiteral() throws ParsingException { + public void parseLiteral_customBracketLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); @@ -119,7 +119,7 @@ public void testCustomBracketLiteral() throws ParsingException { } @Test - public void testMixedCustomLiterals() throws ParsingException { + public void parseLiteral_mixedLiterals_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) @@ -133,33 +133,30 @@ public void testMixedCustomLiterals() throws ParsingException { } @Test - public void testNonTrivialCustomPipeLiteral() throws ParsingException { + public void parseLiteral_nontrivialPipeLiteral_succeeds() throws ParsingException { String label = "this is a test, do not worry."; String input = "p(|" + label + "|)"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - 
parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test - public void testNestedParenLiteral() throws ParsingException { + public void parseLiteral_nestedParenLiterals_succeeds() throws ParsingException { String label = "(((this is a test, do not worry.)))"; String input = "p((" + label + "))"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test - public void testMultipleParenLiterals() throws ParsingException { + public void parseLiteral_multipleParenLiterals_succeeds() throws ParsingException { String input = "p((test), (tset))"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( @@ -168,35 +165,40 @@ public void testMultipleParenLiterals() throws ParsingException { } @Test - public void testMultipleNestedParenLiterals() throws ParsingException { - String input = "p(((test)), ((tset)))"; + public void parseLiteral_multipleNestedParenLiterals_succeeds() throws ParsingException { + String input = "p(((test)), ((tset), (tst)))"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( - Arrays.asList(makeReversedConstant("(test)"), makeReversedConstant("(tset)"))); + Arrays.asList(makeReversedConstant("(test)"), makeReversedConstant("(tset), (tst)"))); assertEquals(expected, constants); } + @Test(expected = ParsingException.class) + public void parseLiteral_mismatchedNestedParenLiteral_throws() throws ParsingException { + String input = "p((test ())"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); + RuleParser.parseLiteral(input, parserConfiguration); + } + @Test - public void testNestedBraceLiteral() throws ParsingException { + public void parseLiteral_nestedBraceLiteral_succeeds() throws ParsingException { String label = "{{{this is a test, do not worry.}}}"; String input = "p({" + label + 
"})"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test - public void testMultipleBraceLiterals() throws ParsingException { + public void parseLiteral_multipleBraceLiterals_succeeds() throws ParsingException { String input = "p({test}, {tset})"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( @@ -205,11 +207,10 @@ public void testMultipleBraceLiterals() throws ParsingException { } @Test - public void testMultipleNestedBraceLiterals() throws ParsingException { + public void parseLiteral_multipleNestedBraceLiterals_succeeds() throws ParsingException { String input = "p({{test}}, {{tset}})"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( @@ -218,22 +219,20 @@ public void testMultipleNestedBraceLiterals() throws ParsingException { } @Test - public void testNestedBracketLiteral() throws ParsingException { + public void parseLiteral_nestedBracketLiteral_succeeds() throws ParsingException { String label = "[[[this is a test, do not worry.]]]"; String input = "p([" + label + "])"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test - public void testMultipleBracketLiterals() throws ParsingException { + public void parseLiteral_multipleBracketLiterals_succeeds() throws ParsingException { String input = "p([test], [tset])"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new 
ArrayList<>( @@ -242,22 +241,19 @@ public void testMultipleBracketLiterals() throws ParsingException { } @Test - public void testMultipleNestedBracketLiterals() throws ParsingException { - String input = "p([[test]], [[tset]])"; - - + public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws ParsingException { + String input = "p([[test]], [[tset], [tst]])"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( - Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset]"))); + Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset], [tst]"))); assertEquals(expected, constants); } @Test - public void testMixedAndNestedCustomLiterals() throws ParsingException { + public void parseLiteral_mixedAndNestedLiterals_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) @@ -276,6 +272,9 @@ static Constant makeReversedConstant(String name) { return Expressions.makeAbstractConstant(builder.reverse().toString()); } + static ConfigurableLiteralHandler reversingHandler = + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm); + static ConfigurableLiteralHandler getMockLiteralHandler(ConfigurableLiteralDelimiter delimiter, Constant constant) { ConfigurableLiteralHandler handler = mock(ConfigurableLiteralHandler.class); try { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 5a357f407..42a835558 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -190,7 +190,7 @@ public void sparqlDataSourceDeclaration_invalidNumberOfArguments_throws() throws } @Test - public void parseDataSourceDeclaration_windowsStylePathName_success() throws ParsingException, IOException { + public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws ParsingException, IOException { RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 8a75123bd..061fcc00f 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -238,7 +238,7 @@ public void testIncompleteStringLiteral() throws ParsingException { } @Test - public void parseLiteral_escapeSequences_success() throws ParsingException { + public void parseLiteral_escapeSequences_succeeds() throws ParsingException { String input = 
"p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); @@ -253,7 +253,7 @@ public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { } @Test - public void parseLiteral_allEscapeSequences_success() throws ParsingException { + public void parseLiteral_allEscapeSequences_succeeds() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; PositiveLiteral fact = Expressions.makePositiveLiteral("p", @@ -281,7 +281,7 @@ public void parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingE } @Test - public void parseLiteral_multiLineLiteral_success() throws ParsingException { + public void parseLiteral_multiLineLiteral_succeeds() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); From 0850a674de6875f45c0be99532cb935cb9900e79 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 31 Jan 2020 16:18:21 +0100 Subject: [PATCH 0526/1003] Parser: Generalise Configurable Literals to return Terms --- .../semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java | 4 ++-- .../org/semanticweb/vlog4j/parser/ParserConfiguration.java | 3 ++- .../java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java index a98bfec68..bd5b14c24 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -40,5 +40,5 @@ public interface ConfigurableLiteralHandler { * @throws ParsingException when the given syntactic form is invalid. * @return an appropriate @{link Constant} instance. 
*/ - public Constant parseLiteral(String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException; + public Term parseLiteral(String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException; } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 096a1a06c..e9536d756 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -151,7 +152,7 @@ public boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter deli * the given syntactic form is invalid. * @return an appropriate {@link Constant} instance. */ - public Constant parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, + public Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException { if (!isConfigurableLiteralRegistered(delimiter)) { throw new ParsingException( diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 95d68ed9f..9ecf7ded8 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -501,7 +501,7 @@ TOKEN : { < BASE : "base" > : DIRECTIVE_ARGUMENTS | < PREFIX : "prefix" > : DIRECTIVE_ARGUMENTS | < SOURCE : "source" > : DIRECTIVE_ARGUMENTS - | < CUSTOM : < DIRECTIVENAME > > : DIRECTIVE_ARGUMENTS + | < CUSTOM_DIRECTIVE : < DIRECTIVENAME > > : DIRECTIVE_ARGUMENTS | < DIRECTIVENAME : [ "a"-"z", "A"-"Z" ] ([ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ])* > } From bd32a10690787acc8b6f2e777f973359ccfd7039 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Sun, 2 Feb 2020 14:55:52 +0100 Subject: [PATCH 0527/1003] Parser: Make handling of arguments in directives type-safe --- .../parser/DataSourceDeclarationHandler.java | 4 +- .../vlog4j/parser/DirectiveArgument.java | 208 ++++++++++++++++++ .../vlog4j/parser/ParserConfiguration.java | 8 +- .../CsvFileDataSourceDeclarationHandler.java | 14 +- .../RdfFileDataSourceDeclarationHandler.java | 14 +- ...eryResultDataSourceDeclarationHandler.java | 46 ++-- .../vlog4j/parser/javacc/JavaCCParser.jj | 31 ++- .../parser/javacc/JavaCCParserBase.java | 3 +- .../parser/RuleParserDataSourceTest.java | 6 +- 9 files changed, 286 insertions(+), 48 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index 180b93053..7a519414e 100644 --- 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -47,7 +47,7 @@ public interface DataSourceDeclarationHandler { * Source, or the number of arguments is invalid. * @return a {@link DataSource} instance corresponding to the given arguments. */ - DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException; /** @@ -59,7 +59,7 @@ DataSource handleDeclaration(List arguments, final SubParserFactory subP * @throws ParsingException when the given number of Arguments is invalid for * the Data Source. */ - static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { + static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { if (arguments.size() != number) { throw new ParsingException("Invalid number of arguments " + arguments.size() + " for Data Source declaration, expected " + number); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java new file mode 100644 index 000000000..d7fc50b95 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java @@ -0,0 +1,208 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.net.URL; +import java.util.Optional; +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.api.Term; + +/** + * A tagged union representing the possible types allowed to appear as arguments + * in directives. + * + * @author Maximilian Marx + */ +public abstract class DirectiveArgument { + private DirectiveArgument() { + } + + /** + * Apply a function to the contained value. + * + * @argument stringHandler the function to apply to a string argument + * @argument iriHandler the function to apply to an IRI + * @argument termHandler the function to apply to a Term + * + * @return the value returned by the appropriate handler function + */ + public abstract V apply(Function stringHandler, + Function iriHandler, Function termHandler); + + /** + * Partially compare two arguments, without comparing the actual values. + * + * @argument other the Object to compare to. + * + * @return An {@link Optional} containing true if the arguments are surely + * equal, containing false if the arguments are not equal, or an empty + * Optional if the values of the arguments need to be compared. 
+ * + */ + protected Optional isEqual(Object other) { + if (other == null) { + return Optional.of(false); + } + + if (other == this) { + return Optional.of(true); + } + + if (!(other instanceof DirectiveArgument)) { + return Optional.of(false); + } + + return Optional.empty(); + } + + /** + * Create an argument containing a String. + * + * @argument value the string value + * + * @return An argument containing the given string value + */ + public static DirectiveArgument string(String value) { + return new DirectiveArgument() { + @Override + public V apply(Function stringHandler, + Function iriHandler, Function termHandler) { + return stringHandler.apply(value); + } + + @Override + public boolean equals(Object other) { + Optional maybeEquals = isEqual(other); + + if (maybeEquals.isPresent()) { + return maybeEquals.get(); + } + + DirectiveArgument otherArgument = (DirectiveArgument) other; + return otherArgument.apply(str -> str.equals(value), iri -> false, term -> false); + } + + @Override + public int hashCode() { + return 41 * value.hashCode(); + } + }; + } + + /** + * Create an argument containing a IRI. + * + * @argument value the IRI value + * + * @return An argument containing the given IRI value + */ + public static DirectiveArgument iri(URL value) { + return new DirectiveArgument() { + @Override + public V apply(Function stringHandler, + Function iriHandler, Function termHandler) { + return iriHandler.apply(value); + } + + @Override + public boolean equals(Object other) { + Optional maybeEquals = isEqual(other); + + if (maybeEquals.isPresent()) { + return maybeEquals.get(); + } + + DirectiveArgument otherArgument = (DirectiveArgument) other; + return otherArgument.apply(str -> false, iri -> iri.equals(value), term -> false); + } + + @Override + public int hashCode() { + return 43 * value.hashCode(); + } + }; + } + + /** + * Create an argument containing a Term. + * + * @argument value the Term value + * + * @return An argument containing the given Term value + */ + public static DirectiveArgument term(Term value) { + return new DirectiveArgument() { + @Override + public V apply(Function stringHandler, + Function iriHandler, Function termHandler) { + return termHandler.apply(value); + } + + @Override + public boolean equals(Object other) { + Optional maybeEquals = isEqual(other); + + if (maybeEquals.isPresent()) { + return maybeEquals.get(); + } + + DirectiveArgument otherArgument = (DirectiveArgument) other; + return otherArgument.apply(str -> false, iri -> false, term -> term.equals(value)); + } + + @Override + public int hashCode() { + return 47 * value.hashCode(); + } + }; + } + + /** + * Create an optional from a (possible) string value. + * + * @return An optional containing the contained string, or an empty Optional if + * the argument doesn't contain a string. + */ + public Optional fromString() { + return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty()); + } + + /** + * Create an optional from a (possible) IRI value. + * + * @return An optional containing the contained IRI, or an empty Optional if the + * argument doesn't contain a IRI. + */ + public Optional fromIri() { + return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty()); + } + + /** + * Create an optional from a (possible) Term value. + * + * @return An optional containing the contained Term, or an empty Optional if + * the argument doesn't contain a Term. 
+ */ + public Optional fromTerm() { + return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of); + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index e9536d756..204acafe8 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -95,8 +95,8 @@ public ParserConfiguration registerDataSource(final String name, final DataSourc * * @return the Data Source instance. */ - public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final String name, final List args, - final SubParserFactory subParserFactory) throws ParsingException { + public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final String name, + final List args, final SubParserFactory subParserFactory) throws ParsingException { final DataSourceDeclarationHandler handler = this.dataSources.get(name); if (handler == null) { @@ -113,8 +113,8 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final Strin * @param languageTag the language tag, or null if not present. * @param the datatype, or null if not present. * - * @throws ParsingException when the lexical form is invalid for the - * given data type. + * @throws ParsingException when the lexical form is invalid for the given data + * type. * @return the {@link Constant} corresponding to the given arguments. */ public Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java index 8b7db9640..afe42c2cf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -23,10 +23,12 @@ import java.io.File; import java.io.IOException; import java.util.List; +import java.util.NoSuchElementException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -37,12 +39,18 @@ */ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); - String fileName = arguments.get(0); - File file = new File(fileName); + DirectiveArgument fileNameArgument = arguments.get(0); + String fileName; + try { + fileName = fileNameArgument.fromString().get(); + } catch (NoSuchElementException e) { + throw new ParsingException("File name \"" + fileNameArgument + "\" is not a string.", e); + } + File file = new File(fileName); try { return new CsvFileDataSource(file); } catch 
(IOException e) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java index a17145e19..e54fc66c7 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -23,10 +23,12 @@ import java.io.File; import java.io.IOException; import java.util.List; +import java.util.NoSuchElementException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -37,12 +39,18 @@ */ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); - String fileName = arguments.get(0); - File file = new File(fileName); + DirectiveArgument fileNameArgument = arguments.get(0); + String fileName; + try { + fileName = fileNameArgument.fromString().get(); + } catch (NoSuchElementException e) { + throw new ParsingException("File name \"" + fileNameArgument + "\" is not a string.", e); + } + File file = new File(fileName); try { return new RdfFileDataSource(file); } catch (IOException e) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index b49683115..9f313ddd1 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -4,14 +4,14 @@ * #%L * VLog4j Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 VLog4j Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
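A minimal sketch of how the DirectiveArgument union introduced in this patch could be exercised from client code; it is not part of the patch series, and the demo class name and the example IRI are invented for illustration. Only API shown in the diffs above is used: the string/iri factory methods, apply(), and fromString().

// Sketch only: exercises the DirectiveArgument API from this patch.
// The class name and the example IRI are invented for illustration.
import java.net.URL;
import java.util.Arrays;
import java.util.List;

import org.semanticweb.vlog4j.parser.DirectiveArgument;

public class DirectiveArgumentDemo {
	public static void main(String[] args) throws Exception {
		// Build one string argument and one IRI argument, as the parser would for a directive.
		List<DirectiveArgument> arguments = Arrays.asList(
				DirectiveArgument.string("hello"),
				DirectiveArgument.iri(new URL("https://example.org/sparql")));

		for (DirectiveArgument argument : arguments) {
			// apply() dispatches on the contained alternative and maps it to a common result type.
			String description = argument.apply(
					str -> "string: " + str,
					iri -> "IRI: " + iri,
					term -> "term: " + term);
			System.out.println(description);
		}

		// fromString() only yields a value for string arguments; the IRI argument gives an empty Optional.
		System.out.println(arguments.get(0).fromString().isPresent()); // true
		System.out.println(arguments.get(1).fromString().isPresent()); // false
	}
}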
@@ -20,17 +20,15 @@ * #L% */ -import java.net.MalformedURLException; import java.net.URL; import java.util.List; +import java.util.NoSuchElementException; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; -import org.semanticweb.vlog4j.parser.javacc.ParseException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -40,29 +38,35 @@ */ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 3); - String endpoint = arguments.get(0); - JavaCCParser parser = subParserFactory.makeSubParser(endpoint); - String parsedEndpoint; + DirectiveArgument endpointArgument = arguments.get(0); + URL endpoint; try { - parsedEndpoint = parser.absoluteIri(); - } catch (ParseException | PrefixDeclarationException e) { - throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); + endpoint = endpointArgument.fromIri().get(); + } catch (NoSuchElementException e) { + throw new ParsingException( + "SPARQL endpoint \"" + endpointArgument + "\" is not a valid IRI: " + e.getMessage(), e); } - URL endpointUrl; + DirectiveArgument variablesArgument = arguments.get(1); + String variables; try { - endpointUrl = new URL(parsedEndpoint); - } catch (MalformedURLException e) { - throw new ParsingException("SPARQL endpoint \"" + endpoint + "\" is not a valid URL: " + e.getMessage(), e); + variables = variablesArgument.fromString().get(); + } catch (NoSuchElementException e) { + throw new ParsingException("Variables list \"" + variablesArgument + "\" is not a string.", e); } - String variables = arguments.get(1); - String query = arguments.get(2); + DirectiveArgument queryArgument = arguments.get(2); + String query; + try { + query = queryArgument.fromString().get(); + } catch (NoSuchElementException e) { + throw new ParsingException("Query fragment \"" + queryArgument + "\" is not a string.", e); + } - return new SparqlQueryResultDataSource(endpointUrl, variables, query); + return new SparqlQueryResultDataSource(endpoint, variables, query); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 9ecf7ded8..1176f9c7a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -27,6 +27,7 @@ import java.util.ArrayDeque; import java.util.LinkedList; import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; @@ -109,12 +110,6 @@ String absoluteIri() throws PrefixDeclarationException : { | iri 
= < PNAME_LN > { return resolvePrefixedName(iri.image); } } -String quotedIri() throws PrefixDeclarationException : { - String iri; -} { - iri = absoluteIri() { return "<" + iri + ">"; } -} - void source() throws PrefixDeclarationException : { String predicateName; DataSource dataSource; @@ -132,7 +127,7 @@ void source() throws PrefixDeclarationException : { DataSource dataSource() throws PrefixDeclarationException : { Token sourceName; - List< String > arguments; + List< DirectiveArgument > arguments; } { ( sourceName = < ARGUMENT_NAME > | sourceName = < VARORPREDNAME > ) < LPAREN > arguments = Arguments() < RPAREN > { @@ -395,13 +390,25 @@ String String() : { ) { return unescapeStr(t.image, t.beginLine, t.beginColumn); } } -LinkedList< String > Arguments() throws PrefixDeclarationException : { +LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : { + DirectiveArgument argument; String str; - LinkedList< String > rest = new LinkedList< String >(); + Term t; + LinkedList< DirectiveArgument > rest = new LinkedList< DirectiveArgument >(); } { - ( str = String() - | str = quotedIri()) [< COMMA > rest = Arguments()] { - rest.addFirst(str); + ( str = String() { argument = DirectiveArgument.string(str); } + | str = absoluteIri() { + URL url; + try { + url = new URL(str); + } catch (MalformedURLException e) { + throw makeParseExceptionWithCause("Error parsing IRIhandler: " + e.getMessage(), e); + } + argument = DirectiveArgument.iri(url); + } + | t = term(FormulaContext.HEAD) { argument = DirectiveArgument.term(t); } + ) [< COMMA > rest = Arguments()] { + rest.addFirst(argument); return rest; } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 273c9f403..54c4b7212 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; @@ -312,7 +313,7 @@ PrefixDeclarations getPrefixDeclarations() { } DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String syntacticForm, - List arguments, SubParserFactory subParserFactory) throws ParseException { + List arguments, SubParserFactory subParserFactory) throws ParseException { try { return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(syntacticForm, arguments, subParserFactory); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 42a835558..ac55a714e 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -43,6 +43,7 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; 
import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -141,11 +142,12 @@ public void testCustomDataSource() throws ParsingException { DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.>any(), + doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.>any(), ArgumentMatchers.any()); String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; - List expectedArguments = Arrays.asList("hello", "world"); + List expectedArguments = Arrays.asList(DirectiveArgument.string("hello"), + DirectiveArgument.string("world")); RuleParser.parseDataSourceDeclaration(input, parserConfiguration); verify(handler).handleDeclaration(eq(expectedArguments), ArgumentMatchers.any()); From 548b38e32b7bc37009a7b6d86cf4ed7f9b6eceaf Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 3 Feb 2020 13:06:34 +0100 Subject: [PATCH 0528/1003] Parser: Generalise handling of directives from DataSources --- .../parser/DataSourceDeclarationHandler.java | 38 +---------- .../vlog4j/parser/DirectiveHandler.java | 66 +++++++++++++++++++ .../vlog4j/parser/ParserConfiguration.java | 2 +- .../CsvFileDataSourceDeclarationHandler.java | 5 +- .../RdfFileDataSourceDeclarationHandler.java | 5 +- ...eryResultDataSourceDeclarationHandler.java | 9 +-- .../parser/RuleParserDataSourceTest.java | 4 +- 7 files changed, 82 insertions(+), 47 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index 7a519414e..4ec871f68 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -20,11 +20,9 @@ * #L% */ -import java.util.List; +import org.semanticweb.vlog4j.parser.DirectiveHandler; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** * Handler for parsing a custom Data Source declaration. @@ -32,37 +30,5 @@ * @author Maximilian Marx */ @FunctionalInterface -public interface DataSourceDeclarationHandler { - /** - * Parse a Data Source Declaration. - * - * This is called by the parser to instantiate the {@link DataSource} component - * of a {@link DataSourceDeclaration}. - * - * @param arguments Arguments given to the Data Source declaration. - * @param subParserFactory a factory for obtaining a SubParser, sharing the - * parser's state, but bound to new input. - * - * @throws ParsingException when any of the arguments is invalid for the Data - * Source, or the number of arguments is invalid. - * @return a {@link DataSource} instance corresponding to the given arguments. 
- */ - DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) - throws ParsingException; - - /** - * Validate the provided number of arguments to the data source. - * - * @param arguments Arguments given to the Data Source declaration. - * @param number expected number of arguments - * - * @throws ParsingException when the given number of Arguments is invalid for - * the Data Source. - */ - static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { - if (arguments.size() != number) { - throw new ParsingException("Invalid number of arguments " + arguments.size() - + " for Data Source declaration, expected " + number); - } - } +public interface DataSourceDeclarationHandler extends DirectiveHandler { } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java new file mode 100644 index 000000000..2883225be --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -0,0 +1,66 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing a custom directive. + * + * @author Maximilian Marx + */ +@FunctionalInterface +public interface DirectiveHandler { + /** + * Parse a Directive. + * + * This is called by the parser to parse directives. + * + * @param arguments Arguments given to the Directive statement. + * @param subParserFactory a factory for obtaining a SubParser, sharing the + * parser's state, but bound to new input. + * + * @throws ParsingException when any of the arguments is invalid for the + * directive, or the number of arguments is invalid. + * @return a {@code T} instance corresponding to the given arguments. + */ + T handleDirective(List arguments, final SubParserFactory subParserFactory) + throws ParsingException; + + /** + * Validate the provided number of arguments to the data source. + * + * @param arguments Arguments given to the Directive statement. + * @param number expected number of arguments + * + * @throws ParsingException when the given number of Arguments is invalid for + * the Directive statement. 
+ */ + static void validateNumberOfArguments(final List arguments, final int number) + throws ParsingException { + if (arguments.size() != number) { + throw new ParsingException( + "Invalid number of arguments " + arguments.size() + " for Directive statement, expected " + number); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 204acafe8..6ad757659 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -103,7 +103,7 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final Strin throw new ParsingException("Data source \"" + name + "\" is not known."); } - return handler.handleDeclaration(args, subParserFactory); + return handler.handleDirective(args, subParserFactory); } /** diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java index afe42c2cf..edd66a12f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -29,6 +29,7 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.DirectiveArgument; +import org.semanticweb.vlog4j.parser.DirectiveHandler; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -39,9 +40,9 @@ */ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); + DirectiveHandler.validateNumberOfArguments(arguments, 1); DirectiveArgument fileNameArgument = arguments.get(0); String fileName; try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java index e54fc66c7..32a5f6b23 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -29,6 +29,7 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.DirectiveArgument; +import org.semanticweb.vlog4j.parser.DirectiveHandler; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -39,9 +40,9 @@ */ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public 
DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); + DirectiveHandler.validateNumberOfArguments(arguments, 1); DirectiveArgument fileNameArgument = arguments.get(0); String fileName; try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 9f313ddd1..cda6f38b9 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.DirectiveArgument; +import org.semanticweb.vlog4j.parser.DirectiveHandler; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -38,9 +39,9 @@ */ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 3); + DirectiveHandler.validateNumberOfArguments(arguments, 3); DirectiveArgument endpointArgument = arguments.get(0); URL endpoint; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index ac55a714e..4d045d292 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -142,7 +142,7 @@ public void testCustomDataSource() throws ParsingException { DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.>any(), + doReturn(source).when(handler).handleDirective(ArgumentMatchers.>any(), ArgumentMatchers.any()); String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; @@ -150,7 +150,7 @@ public void testCustomDataSource() throws ParsingException { DirectiveArgument.string("world")); RuleParser.parseDataSourceDeclaration(input, parserConfiguration); - verify(handler).handleDeclaration(eq(expectedArguments), ArgumentMatchers.any()); + 
verify(handler).handleDirective(eq(expectedArguments), ArgumentMatchers.any()); } @Test From c273806cd99c2a49662c7701b0246d4f36db05fb Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 3 Feb 2020 16:21:43 +0100 Subject: [PATCH 0529/1003] Parser: Support parsing of Named Nulls in facts --- .../core/model/implementation/Serializer.java | 6 +- .../vlog4j/core/model/TermImplTest.java | 6 +- .../vlog4j/parser/ParserConfiguration.java | 55 +++++++++++++++++++ .../vlog4j/parser/javacc/JavaCCParser.jj | 9 ++- .../parser/javacc/JavaCCParserBase.java | 10 ++++ .../parser/RuleParserParseFactTest.java | 37 ++++++++++--- 6 files changed, 106 insertions(+), 17 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index a9f7006f4..b9f3c2b51 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -57,7 +57,7 @@ public final class Serializer { public static final String NEGATIVE_IDENTIFIER = "~"; public static final String EXISTENTIAL_IDENTIFIER = "!"; public static final String UNIVERSAL_IDENTIFIER = "?"; - public static final String NAMEDNULL_IDENTIFIER = "_"; + public static final String NAMEDNULL_IDENTIFIER = "_:"; public static final String OPENING_PARENTHESIS = "("; public static final String CLOSING_PARENTHESIS = ")"; public static final String OPENING_BRACKET = "["; @@ -342,7 +342,7 @@ private static String getIRIString(final String string) { *
            * Example for {@code string = "\\a"}, the returned value is * {@code string = "\"\\\\a\""} - * + * * @param string * @return an escaped string surrounded by {@code "}. */ @@ -362,7 +362,7 @@ public static String getString(final String string) { *
{@code \r} * {@code \f} *
              - * + * * @param string * @return an escaped string */ diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index 81bf20c96..aed7efd89 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -167,7 +167,7 @@ public void existentialVariableToStringTest() { @Test public void namedNullToStringTest() { NamedNullImpl n = new NamedNullImpl("123"); - assertEquals("_123", n.toString()); + assertEquals("_:123", n.toString()); } @Test(expected = NullPointerException.class) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 6ad757659..6e97f8245 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -39,6 +39,11 @@ * @author Maximilian Marx */ public class ParserConfiguration { + /** + * Whether to allow parsing Named Nulls. + */ + private boolean allowNamedNulls = false; + /** * The registered data sources. */ @@ -183,6 +188,17 @@ public ParserConfiguration registerDatatype(final String name, final DatatypeCon return this; } + /** + * Register a custom literal handler. + * + * @argument delimiter the delimiter to handle. + * @argument handler the handler for this literal type. + * + * @throws IllegalArgumentException when the literal delimiter has + * already been registered. + * + * @return this + */ public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimiter, ConfigurableLiteralHandler handler) throws IllegalArgumentException { if (literals.containsKey(delimiter)) { @@ -192,4 +208,43 @@ public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimite this.literals.put(delimiter, handler); return this; } + + /** + * Set whether to allow parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull}. + * + * @argument allow true allows parsing of named nulls. + * + * @return this + */ + public ParserConfiguration setNamedNulls(boolean allow) { + this.allowNamedNulls = allow; + return this; + } + + /** + * Allow parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull}. + * + * @return this + */ + public ParserConfiguration allowNamedNulls() { + return this.setNamedNulls(true); + } + + /** + * Disallow parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull}. + * + * @return this + */ + public ParserConfiguration disallowNamedNulls() { + return this.setNamedNulls(false); + } + + /** + * Whether parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull} is allowed. 
+ * + * @return this + */ + public boolean isParsingOfNamedNullsAllowed() { + return this.allowNamedNulls; + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 1176f9c7a..631e5c422 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -251,6 +251,8 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { } { s = absoluteIri() { return createConstant(s); } | t = < VARORPREDNAME > { return createConstant(t.image); } + | LOOKAHEAD( < NAMED_NULL >, { isParsingOfNamedNullsAllowed() }) + t = < NAMED_NULL > { return createNamedNull(t.image); } | c = NumericLiteral() { return c; } | c = RDFLiteral() { return c; } | t = < UNIVAR > { @@ -396,8 +398,8 @@ LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : Term t; LinkedList< DirectiveArgument > rest = new LinkedList< DirectiveArgument >(); } { - ( str = String() { argument = DirectiveArgument.string(str); } - | str = absoluteIri() { + ( LOOKAHEAD(String()) str = String() { argument = DirectiveArgument.string(str); } + | LOOKAHEAD(absoluteIri()) str = absoluteIri() { URL url; try { url = new URL(str); @@ -460,6 +462,9 @@ MORE : { | < #PN_LOCAL : ( < PN_CHARS_U > | [ ":", "0"-"9" ] ) ( ( < PN_CHARS > | [ ".", ":" ] )* < PN_CHARS > )? > | < COMMA : "," > + | < NAMED_NULL : "_:" ( < PN_CHARS_U > | [ "0"-"9" ] ) (( < PN_CHARS > | "." )* < PN_CHARS > )? > { + matchedToken.image = JavaCCParserBase.stripChars(matchedToken.image, 2); + } } < DEFAULT, BODY, TERM, DIRECTIVE_ARGUMENTS, PAREN_DELIMITED > TOKEN : { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 54c4b7212..008314039 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -27,12 +27,14 @@ import org.semanticweb.vlog4j.core.model.api.AbstractConstant; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.DirectiveArgument; @@ -142,6 +144,10 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { return Expressions.makeAbstractConstant(absoluteIri); } + NamedNull createNamedNull(String lexicalForm) { + return new NamedNullImpl(lexicalForm); + } + /** * Creates a suitable {@link Constant} from the parsed data. 
* @@ -332,6 +338,10 @@ boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter delimiter) return parserConfiguration.isConfigurableLiteralRegistered(delimiter); } + boolean isParsingOfNamedNullsAllowed() { + return parserConfiguration.isParsingOfNamedNullsAllowed(); + } + void setBase(String baseIri) throws PrefixDeclarationException { prefixDeclarations.setBase(baseIri); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java index 876b01ae8..0d0bd03be 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java @@ -25,8 +25,11 @@ import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -34,35 +37,51 @@ public class RuleParserParseFactTest { private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarations.XSD_STRING); private final Constant b = Expressions.makeDatatypeConstant("b", PrefixDeclarations.XSD_STRING); + private final NamedNull null1 = new NamedNullImpl("1"); private final Fact factA = Expressions.makeFact("p", a); private final Fact factAB = Expressions.makeFact("p", a, b); + private final Fact fact1 = Expressions.makeFact("p", null1); @Test - public void testFactArityOne() throws ParsingException { + public void parseFact_string_succeeds() throws ParsingException { assertEquals(RuleParser.parseFact("p(\"a\") ."), factA); } @Test - public void testFactArityOneWithDataType() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\") ."), factA); - } - - @Test - public void testFactArityTwo() throws ParsingException { + public void parseFact_twoStrings_succeeds() throws ParsingException { assertEquals(RuleParser.parseFact("p(\"a\",\"b\") ."), factAB); } @Test(expected = ParsingException.class) - public void testFactWithVariable() throws ParsingException { + public void parseFact_nonGroundFact_throws() throws ParsingException { String input = "p(?X) ."; RuleParser.parseFact(input); } @Test(expected = ParsingException.class) - public void testZeroArityFact() throws ParsingException { + public void parseFact_arityZeroFact_throws() throws ParsingException { String input = "p() ."; RuleParser.parseFact(input); } + @Test(expected = ParsingException.class) + public void parseFact_namedNull_throws() throws ParsingException { + String input = "p(_:1) ."; + RuleParser.parseFact(input); + } + + @Test + public void parseFact_namedNullAllowed_succeeds() throws ParsingException { + String input = "p(_:1) ."; + ParserConfiguration parserConfiguration = new ParserConfiguration().allowNamedNulls(); + assertEquals(RuleParser.parseFact(input, parserConfiguration), fact1); + } + + @Test(expected = ParsingException.class) + public void parseFact_namedNullAsPredicateName_throws() throws ParsingException { + String input = "_:p(\"a\") ."; + ParserConfiguration 
parserConfiguration = new ParserConfiguration().allowNamedNulls(); + RuleParser.parseFact(input, parserConfiguration); + } } From b7271a58269049a9eb480ceb2940bc9738aadb77 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 3 Feb 2020 18:27:36 +0100 Subject: [PATCH 0530/1003] Parser: Add support for custom directives --- .../vlog4j/parser/DirectiveHandler.java | 19 ++++- .../vlog4j/parser/ParserConfiguration.java | 74 +++++++++++++++++-- .../vlog4j/parser/javacc/JavaCCParser.jj | 29 ++++++-- .../parser/javacc/JavaCCParserBase.java | 10 ++- 4 files changed, 113 insertions(+), 19 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index 2883225be..54ebebb90 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -22,6 +22,8 @@ import java.util.List; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -44,7 +46,7 @@ public interface DirectiveHandler { * directive, or the number of arguments is invalid. * @return a {@code T} instance corresponding to the given arguments. */ - T handleDirective(List arguments, final SubParserFactory subParserFactory) + public T handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException; /** @@ -56,11 +58,24 @@ T handleDirective(List arguments, final SubParserFactory subP * @throws ParsingException when the given number of Arguments is invalid for * the Directive statement. */ - static void validateNumberOfArguments(final List arguments, final int number) + public static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { if (arguments.size() != number) { throw new ParsingException( "Invalid number of arguments " + arguments.size() + " for Directive statement, expected " + number); } } + + /** + * Obtain a {@link KnowledgeBase} from a {@link SubParserFactory}. + * + * @argument subParserFactory the SubParserFactory. + * + * @return the knowledge base. + */ + default KnowledgeBase getKnowledgeBase(SubParserFactory subParserFactory) { + JavaCCParser subParser = subParserFactory.makeSubParser(""); + + return subParser.getKnowledgeBase(); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 6e97f8245..5f59f75da 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -21,6 +21,7 @@ */ import java.util.HashMap; +import java.util.Arrays; import java.util.List; import org.apache.commons.lang3.Validate; @@ -30,6 +31,7 @@ import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -40,7 +42,12 @@ */ public class ParserConfiguration { /** - * Whether to allow parsing Named Nulls. 
+ * Reserved directive names that are not allowed to be registered. + */ + public static final List RESERVED_DIRECTIVE_NAMES = Arrays.asList("base", "prefix", "source"); + + /** + * Whether parsing Named Nulls is allowed. */ private boolean allowNamedNulls = false; @@ -59,6 +66,11 @@ public class ParserConfiguration { */ private HashMap literals = new HashMap<>(); + /** + * The registered custom directives. + */ + private HashMap> directives = new HashMap<>(); + /** * Register a new (type of) Data Source. * @@ -194,23 +206,68 @@ public ParserConfiguration registerDatatype(final String name, final DatatypeCon * @argument delimiter the delimiter to handle. * @argument handler the handler for this literal type. * - * @throws IllegalArgumentException when the literal delimiter has - * already been registered. + * @throws IllegalArgumentException when the literal delimiter has already been + * registered. * * @return this */ public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimiter, ConfigurableLiteralHandler handler) throws IllegalArgumentException { - if (literals.containsKey(delimiter)) { - throw new IllegalArgumentException("Literal delimiter \"" + delimiter + "\" is already registered."); - } + Validate.isTrue(!this.literals.containsKey(delimiter), "Literal delimiter \"%s\" is already registered.", + delimiter); this.literals.put(delimiter, handler); return this; } /** - * Set whether to allow parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull}. + * Register a directive. + * + * @argument name the name of the directive. + * @argument handler the handler for this directive. + * + * @throws IllegalArgumentException when the directive name has already been + * registered, or is a reserved name (i.e., one + * of {@code base}, {@code prefix}, and + * {@code source}). + * + * @return this + */ + public ParserConfiguration registerDirective(String name, DirectiveHandler handler) + throws IllegalArgumentException { + Validate.isTrue(!RESERVED_DIRECTIVE_NAMES.contains(name), "The name \"%s\" is a reserved directive name.", + name); + Validate.isTrue(!this.directives.containsKey(name), "The directive \"%s\" is already registered.", name); + + this.directives.put(name, handler); + return this; + } + + /** + * Parse a directive statement. + * + * @argument name the name of the directive. + * @argument arguments the arguments given in the statement. + * + * @throws ParsingException when the directive is not known, or the arguments + * are invalid for the directive. + * + * @return the (possibly updated) KnowledgeBase + */ + public KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) + throws ParsingException { + final DirectiveHandler handler = this.directives.get(name); + + if (handler == null) { + throw new ParsingException("Directive \"" + name + "\" is not known."); + } + + return handler.handleDirective(arguments, subParserFactory); + } + + /** + * Set whether to allow parsing of + * {@link semanticweb.vlog4j.core.model.api.NamedNull}. * * @argument allow true allows parsing of named nulls. * @@ -240,7 +297,8 @@ public ParserConfiguration disallowNamedNulls() { } /** - * Whether parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull} is allowed. + * Whether parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull} is + * allowed. 
* * @return this */ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 631e5c422..7024f63c3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -41,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -126,8 +127,8 @@ void source() throws PrefixDeclarationException : { } DataSource dataSource() throws PrefixDeclarationException : { - Token sourceName; - List< DirectiveArgument > arguments; + Token sourceName; + List< DirectiveArgument > arguments; } { ( sourceName = < ARGUMENT_NAME > | sourceName = < VARORPREDNAME > ) < LPAREN > arguments = Arguments() < RPAREN > { @@ -135,14 +136,24 @@ DataSource dataSource() throws PrefixDeclarationException : { } } +KnowledgeBase directive() throws PrefixDeclarationException : { + Token name; + List< DirectiveArgument > arguments; +} { + name = < CUSTOM_DIRECTIVE > arguments = Arguments() < DOT > { + return parseDirectiveStatement(name.image, arguments, getSubParserFactory()); + } +} + void statement() throws PrefixDeclarationException : { Statement statement; + KnowledgeBase knowledgeBase; resetVariableSets(); } { - ( LOOKAHEAD(rule()) statement = rule() - | statement = fact(FormulaContext.HEAD) ) { - addStatement(statement); - } + ( LOOKAHEAD(rule()) statement = rule() { addStatement(statement); } + | statement = fact(FormulaContext.HEAD) { addStatement(statement); } + | knowledgeBase = directive() { setKnowledgeBase(knowledgeBase); } + ) } Rule rule() throws PrefixDeclarationException : { @@ -274,7 +285,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { | try { tt = ConfigurableLiteral () { return tt; } } catch (ParsingException e) { - throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); + throw makeParseExceptionWithCause("Invalid configurable literal expression", e); } } @@ -513,7 +524,9 @@ TOKEN : { < BASE : "base" > : DIRECTIVE_ARGUMENTS | < PREFIX : "prefix" > : DIRECTIVE_ARGUMENTS | < SOURCE : "source" > : DIRECTIVE_ARGUMENTS - | < CUSTOM_DIRECTIVE : < DIRECTIVENAME > > : DIRECTIVE_ARGUMENTS + | < CUSTOM_DIRECTIVE : < DIRECTIVENAME > > { + matchedToken.image = JavaCCParserBase.stripChars(matchedToken.image, 1); + }: DIRECTIVE_ARGUMENTS | < DIRECTIVENAME : [ "a"-"z", "A"-"Z" ] ([ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ])* > } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 008314039..7ebeb6e9d 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -331,7 +331,15 @@ DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String syntacticFo Term 
parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, SubParserFactory subParserFactory) throws ParsingException { - return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); + return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); + } + + KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) throws ParseException { + try { + return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); + } catch (ParsingException e) { + throw makeParseExceptionWithCause("Failed while trying to parse directive statement", e); + } } boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter delimiter) { From 10756350a6986d7ec56d9ae4c2e8754943d9cef0 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 6 Feb 2020 17:01:08 +0100 Subject: [PATCH 0531/1003] Parser: Support `@import` statement in rules files --- .../parser/DefaultParserConfiguration.java | 2 + .../vlog4j/parser/DirectiveHandler.java | 103 ++++++++++++++++++ .../CsvFileDataSourceDeclarationHandler.java | 12 +- .../RdfFileDataSourceDeclarationHandler.java | 12 +- ...eryResultDataSourceDeclarationHandler.java | 29 +---- .../ImportFileDirectiveHandler.java | 63 +++++++++++ .../vlog4j/syntax/parser/RuleParserTest.java | 24 ++-- vlog4j-parser/src/test/resources/facts.rls | 4 + 8 files changed, 196 insertions(+), 53 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java create mode 100644 vlog4j-parser/src/test/resources/facts.rls diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java index a52f02b87..383cca87c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java @@ -24,6 +24,7 @@ import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.directives.ImportFileDirectiveHandler; /** * Default parser configuration. Registers default data sources. 
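A sketch of how the custom-directive machinery from the preceding commit might be used alongside the @import handler registered here; it is not part of the patch series. The directive name "log" and the handler's behaviour are invented for illustration; DirectiveHandler, validateNumberOfArguments, getKnowledgeBase and registerDirective are taken from the diffs above.

// Sketch only: a hypothetical "log" directive wired through registerDirective.
// Not part of the patch series; the name "log" and its behaviour are made up.
import java.util.List;

import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.parser.DirectiveArgument;
import org.semanticweb.vlog4j.parser.DirectiveHandler;
import org.semanticweb.vlog4j.parser.ParserConfiguration;
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.javacc.SubParserFactory;

public class LogDirectiveHandler implements DirectiveHandler<KnowledgeBase> {
	@Override
	public KnowledgeBase handleDirective(List<DirectiveArgument> arguments, final SubParserFactory subParserFactory)
			throws ParsingException {
		DirectiveHandler.validateNumberOfArguments(arguments, 1);

		// The knowledge base being parsed into is reachable through the default helper method.
		KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory);
		System.out.println("@log directive called with argument " + arguments.get(0));

		// A directive handler returns the (possibly updated) knowledge base to the parser.
		return knowledgeBase;
	}

	public static ParserConfiguration exampleConfiguration() {
		// "log" is a made-up directive name; "base", "prefix" and "source" are reserved.
		return new ParserConfiguration().registerDirective("log", new LogDirectiveHandler());
	}
}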
@@ -34,6 +35,7 @@ public class DefaultParserConfiguration extends ParserConfiguration { public DefaultParserConfiguration() { super(); registerDefaultDataSources(); + registerDirective("import", new ImportFileDirectiveHandler()); } /** diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index 54ebebb90..2c5cd6954 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -20,8 +20,13 @@ * #L% */ +import java.io.File; +import java.io.IOException; +import java.net.URL; import java.util.List; +import java.util.NoSuchElementException; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -66,6 +71,91 @@ public static void validateNumberOfArguments(final List argum } } + /** + * Validate that the provided argument is a {@link String}. + * + * @param argument the argument to validate + * @param description a description of the argument, used in constructing the + * error message. + * + * @throws ParsingException when the given argument is not a {@link String}. + * + * @return the contained {@link String}. + */ + public static String validateStringArgument(final DirectiveArgument argument, final String description) + throws ParsingException { + try { + return argument.fromString().get(); + } catch (NoSuchElementException e) { + throw new ParsingException(description + "\"" + argument + "\" is not a string.", e); + } + } + + /** + * Validate that the provided argument is a file name. + * + * @param argument the argument to validate + * @param description a description of the argument, used in constructing the + * error message. + * + * @throws ParsingException when the given argument is not a valid file name. + * + * @return the File corresponding to the contained file name. + */ + public static File validateFilenameArgument(final DirectiveArgument argument, final String description) + throws ParsingException { + String fileName = DirectiveHandler.validateStringArgument(argument, description); + File file = new File(fileName); + try { + // we don't care about the actual path, just that there is one. + file.getCanonicalPath(); + } catch (IOException e) { + throw new ParsingException(description + "\"" + argument + "\" is not a valid file path.", e); + } + + return file; + } + + /** + * Validate that the provided argument is an IRI. + * + * @param argument the argument to validate + * @param description a description of the argument, used in constructing the + * error message. + * + * @throws ParsingException when the given argument is not an IRI. + * + * @return the contained IRI. + */ + public static URL validateIriArgument(final DirectiveArgument argument, final String description) + throws ParsingException { + try { + return argument.fromIri().get(); + } catch (NoSuchElementException e) { + throw new ParsingException(description + "\"" + argument + "\" is not an IRI.", e); + } + } + + /** + * Validate that the provided argument is a {@link Term}. + * + * @param argument the argument to validate + * @param description a description of the argument, used in constructing the + * error message. + * + * @throws ParsingException when the given argument is not a {@link Term}. 
+ * + * @return the contained {@link Term}. + */ + public static Term validateTermArgument(final DirectiveArgument argument, final String description) + throws ParsingException { + try { + return argument.fromTerm().get(); + } catch (NoSuchElementException e) { + throw new ParsingException(description + "\"" + argument + "\" is not a string.", e); + } + } + /** * Obtain a {@link KnowledgeBase} from a {@link SubParserFactory}. * @@ -78,4 +168,17 @@ default KnowledgeBase getKnowledgeBase(SubParserFactory subParserFactory) { return subParser.getKnowledgeBase(); } + + /** + * Obtain a {@link ParserConfiguration} from a {@link SubParserFactory}. + * + * @argument subParserFactory the SubParserFactory. + * + * @return the parser configuration. + */ + default ParserConfiguration getParserConfiguration(SubParserFactory subParserFactory) { + JavaCCParser subParser = subParserFactory.makeSubParser(""); + + return subParser.getParserConfiguration(); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java index edd66a12f..5d6fac9db 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -23,7 +23,6 @@ import java.io.File; import java.io.IOException; import java.util.List; -import java.util.NoSuchElementException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; @@ -43,19 +42,12 @@ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclaratio public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); - DirectiveArgument fileNameArgument = arguments.get(0); - String fileName; - try { - fileName = fileNameArgument.fromString().get(); - } catch (NoSuchElementException e) { - throw new ParsingException("File name \"" + fileNameArgument + "\" is not a string.", e); - } + File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "source file"); - File file = new File(fileName); try { return new CsvFileDataSource(file); } catch (IOException e) { - throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); + throw new ParsingException("Could not use source file \"" + file.getName() + "\": " + e.getMessage(), e); } } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java index 32a5f6b23..1018a3f51 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -23,7 +23,6 @@ import java.io.File; import java.io.IOException; import java.util.List; -import java.util.NoSuchElementException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; @@ -43,19 +42,12 @@ public class RdfFileDataSourceDeclarationHandler implements 
DataSourceDeclaratio public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); - DirectiveArgument fileNameArgument = arguments.get(0); - String fileName; - try { - fileName = fileNameArgument.fromString().get(); - } catch (NoSuchElementException e) { - throw new ParsingException("File name \"" + fileNameArgument + "\" is not a string.", e); - } + File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "source file"); - File file = new File(fileName); try { return new RdfFileDataSource(file); } catch (IOException e) { - throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); + throw new ParsingException("Could not use source file \"" + file.getName() + "\": " + e.getMessage(), e); } } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index cda6f38b9..ff178435c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -22,7 +22,6 @@ import java.net.URL; import java.util.List; -import java.util.NoSuchElementException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -42,31 +41,9 @@ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSource public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 3); - - DirectiveArgument endpointArgument = arguments.get(0); - URL endpoint; - try { - endpoint = endpointArgument.fromIri().get(); - } catch (NoSuchElementException e) { - throw new ParsingException( - "SPARQL endpoint \"" + endpointArgument + "\" is not a valid IRI: " + e.getMessage(), e); - } - - DirectiveArgument variablesArgument = arguments.get(1); - String variables; - try { - variables = variablesArgument.fromString().get(); - } catch (NoSuchElementException e) { - throw new ParsingException("Variables list \"" + variablesArgument + "\" is not a string.", e); - } - - DirectiveArgument queryArgument = arguments.get(2); - String query; - try { - query = queryArgument.fromString().get(); - } catch (NoSuchElementException e) { - throw new ParsingException("Query fragment \"" + queryArgument + "\" is not a string.", e); - } + URL endpoint = DirectiveHandler.validateIriArgument(arguments.get(0), "SPARQL endpoint"); + String variables = DirectiveHandler.validateStringArgument(arguments.get(1), "variables list"); + String query = DirectiveHandler.validateStringArgument(arguments.get(2), "query fragment"); return new SparqlQueryResultDataSource(endpoint, variables, query); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java new file mode 100644 index 000000000..4334cfb01 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java @@ -0,0 +1,63 @@ +package 
org.semanticweb.vlog4j.parser.directives; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.List; + +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.DirectiveArgument; +import org.semanticweb.vlog4j.parser.DirectiveHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing {@code @import} statements. + * + * @author Maximilian Marx + */ +public class ImportFileDirectiveHandler implements DirectiveHandler { + @Override + public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) + throws ParsingException { + DirectiveHandler.validateNumberOfArguments(arguments, 1); + File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); + FileInputStream stream; + + // @todo(mx): should we try to prevent cyclic imports? + try { + stream = new FileInputStream(file); + } catch (IOException e) { + throw new ParsingException("Failed to read rules from \"" + file.getName() + "\"", e); + } + + KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); + ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); + + RuleParser.parseInto(knowledgeBase, stream, parserConfiguration); + + return knowledgeBase; + } +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 061fcc00f..62e87cdc5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -25,6 +25,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import org.junit.Test; import org.mockito.ArgumentMatchers; @@ -40,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; @@ -58,7 +60,7 @@ public class RuleParserTest { private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); private final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", x, y); private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); - private 
final Fact fact = Expressions.makeFact("http://example.org/s", c); + private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", c); private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); private final Conjunction body1 = Expressions.makeConjunction(atom1, atom2); private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); @@ -70,28 +72,28 @@ public class RuleParserTest { public void testExplicitIri() throws ParsingException { String input = "() ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test public void testPrefixResolution() throws ParsingException { String input = "@prefix ex: . ex:s(ex:c) ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test public void testBaseRelativeResolution() throws ParsingException { String input = "@base . () ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test public void testBaseResolution() throws ParsingException { String input = "@base . s(c) ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test @@ -317,7 +319,7 @@ public void testUnicodeLiteral() throws ParsingException { public void testUnicodeUri() throws ParsingException { String input = "@base . @prefix ex: . ex:\\u0073(c) ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test @@ -340,7 +342,7 @@ public void testLineComments() throws ParsingException { String input = "@prefix ex: . % comment \n" + "%@prefix ex: \n" + " ex:s(ex:c) . % comment \n"; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test @@ -439,4 +441,12 @@ public void testCustomDatatype() throws ParsingException { assertEquals(constant, result); } + @Test + public void parse_importStatement_succeeds() throws ParsingException { + String input = "@import \"src/test/resources/facts.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact1, fact2); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } } diff --git a/vlog4j-parser/src/test/resources/facts.rls b/vlog4j-parser/src/test/resources/facts.rls new file mode 100644 index 000000000..ce985f5a9 --- /dev/null +++ b/vlog4j-parser/src/test/resources/facts.rls @@ -0,0 +1,4 @@ +@prefix ex: . + +ex:s(ex:c) . +p("abc") . 
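A minimal usage sketch of the new `@import` directive introduced in this patch, mirroring the `parse_importStatement_succeeds` test and the facts.rls resource above; the class name and file path are illustrative placeholders, and only the `RuleParser.parse` and `KnowledgeBase.getFacts` calls exercised by that test are assumed here:

import java.util.List;

import org.semanticweb.vlog4j.core.model.api.Fact;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.parser.ParsingException;
import org.semanticweb.vlog4j.parser.RuleParser;

public class ImportDirectiveSketch {
	public static void main(String[] args) throws ParsingException {
		// An @import statement loads another rules file into the knowledge base
		// that is currently being parsed (here: the facts.rls test resource above).
		String input = "@import \"src/test/resources/facts.rls\" .";
		KnowledgeBase knowledgeBase = RuleParser.parse(input);

		// The facts from the imported file are now part of the knowledge base.
		List<Fact> facts = knowledgeBase.getFacts();
		facts.forEach(System.out::println);
	}
}

As in the test, the imported facts become ordinary statements of the resulting knowledge base and can be queried or serialised like any directly parsed fact.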
From 1e508b5a0a3ed40858d0621dbe59be67a7c970ec Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 10 Feb 2020 17:43:31 +0100 Subject: [PATCH 0532/1003] Parser: Avoid duplicate imports in @import statements --- .../vlog4j/core/reasoner/KnowledgeBase.java | 903 +++++++++--------- .../ImportFileDirectiveHandler.java | 27 +- 2 files changed, 483 insertions(+), 447 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index d63f9be70..3198deafa 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -1,434 +1,469 @@ -package org.semanticweb.vlog4j.core.reasoner; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * A knowledge base with rules, facts, and declarations for loading data from - * further sources. This is a "syntactic" object in that it represents some - * information that is not relevant for the semantics of reasoning, but that is - * needed to ensure faithful re-serialisation of knowledge bases loaded from - * files (e.g., preserving order). - * - * @author Markus Kroetzsch - * - */ -public class KnowledgeBase implements Iterable { - - private final Set listeners = new HashSet<>(); - - /** - * Auxiliary class to process {@link Statement}s when added to the knowledge - * base. Returns true if a statement was added successfully. - * - * @author Markus Kroetzsch - * - */ - private class AddStatementVisitor implements StatementVisitor { - @Override - public Boolean visit(final Fact statement) { - KnowledgeBase.this.addFact(statement); - return true; - } - - @Override - public Boolean visit(final Rule statement) { - return true; - } - - @Override - public Boolean visit(final DataSourceDeclaration statement) { - KnowledgeBase.this.dataSourceDeclarations.add(statement); - return true; - } - } - - private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); - - /** - * Auxiliary class to process {@link Statement}s when removed from the knowledge - * base. 
Returns true if a statement was removed successfully. - * - * @author Irina Dragoste - * - */ - private class RemoveStatementVisitor implements StatementVisitor { - - @Override - public Boolean visit(final Fact statement) { - KnowledgeBase.this.removeFact(statement); - return true; - } - - @Override - public Boolean visit(final Rule statement) { - return true; - } - - @Override - public Boolean visit(final DataSourceDeclaration statement) { - KnowledgeBase.this.dataSourceDeclarations.remove(statement); - return true; - } - } - - private final RemoveStatementVisitor removeStatementVisitor = new RemoveStatementVisitor(); - - private class ExtractStatementsVisitor implements StatementVisitor { - - final ArrayList extracted = new ArrayList<>(); - final Class ownType; - - ExtractStatementsVisitor(final Class type) { - this.ownType = type; - } - - ArrayList getExtractedStatements() { - return this.extracted; - } - - @SuppressWarnings("unchecked") - @Override - public Void visit(final Fact statement) { - if (this.ownType.equals(Fact.class)) { - this.extracted.add((T) statement); - } - return null; - } - - @SuppressWarnings("unchecked") - @Override - public Void visit(final Rule statement) { - if (this.ownType.equals(Rule.class)) { - this.extracted.add((T) statement); - } - return null; - } - - @SuppressWarnings("unchecked") - @Override - public Void visit(final DataSourceDeclaration statement) { - if (this.ownType.equals(DataSourceDeclaration.class)) { - this.extracted.add((T) statement); - } - return null; - } - } - - /** - * The primary storage for the contents of the knowledge base. - */ - private final LinkedHashSet statements = new LinkedHashSet<>(); - -// TODO support prefixes -// /** -// * Known prefixes that can be used to pretty-print the contents of the knowledge -// * base. We try to preserve user-provided prefixes found in files when loading -// * data. -// */ -// PrefixDeclarations prefixDeclarations; - - /** - * Index structure that organises all facts by their predicate. - */ - private final Map> factsByPredicate = new HashMap<>(); - - /** - * Index structure that holds all data source declarations of this knowledge - * base. - */ - private final Set dataSourceDeclarations = new HashSet<>(); - - /** - * Registers a listener for changes on the knowledge base - * - * @param listener - */ - public void addListener(final KnowledgeBaseListener listener) { - this.listeners.add(listener); - } - - /** - * Unregisters given listener from changes on the knowledge base - * - * @param listener - */ - public void deleteListener(final KnowledgeBaseListener listener) { - this.listeners.remove(listener); - - } - - /** - * Adds a single statement to the knowledge base. - * - * @param statement the statement to be added - */ - public void addStatement(final Statement statement) { - if (this.doAddStatement(statement)) { - this.notifyListenersOnStatementAdded(statement); - } - } - - /** - * Adds a single statement to the knowledge base. - * - * @param statement the statement to be added - * @return true, if the knowledge base has changed. - */ - boolean doAddStatement(final Statement statement) { - Validate.notNull(statement, "Statement cannot be Null!"); - if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { - this.statements.add(statement); - return true; - } - return false; - } - - /** - * Adds a collection of statements to the knowledge base. 
- * - * @param statements the statements to be added - */ - public void addStatements(final Collection statements) { - final List addedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doAddStatement(statement)) { - addedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsAdded(addedStatements); - } - - /** - * Adds a list of statements to the knowledge base. - * - * @param statements the statements to be added - */ - public void addStatements(final Statement... statements) { - final List addedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doAddStatement(statement)) { - addedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsAdded(addedStatements); - } - - /** - * Removes a single statement from the knowledge base. - * - * @param statement the statement to remove - */ - public void removeStatement(final Statement statement) { - if (this.doRemoveStatement(statement)) { - this.notifyListenersOnStatementRemoved(statement); - } - } - - /** - * Removes a single statement from the knowledge base. - * - * @param statement the statement to remove - * @return true, if the knowledge base has changed. - */ - boolean doRemoveStatement(final Statement statement) { - Validate.notNull(statement, "Statement cannot be Null!"); - - if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { - this.statements.remove(statement); - return true; - } - return false; - } - - /** - * Removes a collection of statements to the knowledge base. - * - * @param statements the statements to remove - */ - public void removeStatements(final Collection statements) { - final List removedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doRemoveStatement(statement)) { - removedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsRemoved(removedStatements); - } - - /** - * Removes a list of statements from the knowledge base. - * - * @param statements the statements to remove - */ - public void removeStatements(final Statement... statements) { - final List removedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doRemoveStatement(statement)) { - removedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsRemoved(removedStatements); - } - - private void notifyListenersOnStatementAdded(final Statement addedStatement) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementAdded(addedStatement); - } - } - - private void notifyListenersOnStatementsAdded(final List addedStatements) { - if (!addedStatements.isEmpty()) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementsAdded(addedStatements); - } - } - } - - private void notifyListenersOnStatementRemoved(final Statement removedStatement) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementRemoved(removedStatement); - } - } - - private void notifyListenersOnStatementsRemoved(final List removedStatements) { - if (!removedStatements.isEmpty()) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementsRemoved(removedStatements); - } - } - } - - /** - * Get the list of all rules that have been added to the knowledge base. The - * list is read-only and cannot be modified to add or delete rules. 
- * - * @return list of {@link Rule}s - */ - public List getRules() { - return this.getStatementsByType(Rule.class); - } - - /** - * Get the list of all facts that have been added to the knowledge base. The - * list is read-only and cannot be modified to add or delete facts. - * - * @return list of {@link Fact}s - */ - public List getFacts() { - return this.getStatementsByType(Fact.class); - } - - /** - * Get the list of all data source declarations that have been added to the - * knowledge base. The list is read-only and cannot be modified to add or delete - * facts. - * - * @return list of {@link DataSourceDeclaration}s - */ - public List getDataSourceDeclarations() { - return this.getStatementsByType(DataSourceDeclaration.class); - } - - List getStatementsByType(final Class type) { - final ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); - for (final Statement statement : this.statements) { - statement.accept(visitor); - } - return Collections.unmodifiableList(visitor.getExtractedStatements()); - } - - /** - * Add a single fact to the internal data structures. It is assumed that it has - * already been checked that this fact is not present yet. - * - * @param fact the fact to add - */ - void addFact(final Fact fact) { - final Predicate predicate = fact.getPredicate(); - this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); - this.factsByPredicate.get(predicate).add(fact); - } - - /** - * Removes a single fact from the internal data structure. It is assumed that it - * has already been checked that this fact is already present. - * - * @param fact the fact to remove - */ - void removeFact(final Fact fact) { - final Predicate predicate = fact.getPredicate(); - final Set facts = this.factsByPredicate.get(predicate); - facts.remove(fact); - if (facts.isEmpty()) { - this.factsByPredicate.remove(predicate); - } - } - - /** - * Returns all {@link Statement}s of this knowledge base. - * - * The result can be iterated over and will return statements in the original - * order. The collection is read-only and cannot be modified to add or delete - * statements. 
- * - * @return a collection of statements - */ - public Collection getStatements() { - return Collections.unmodifiableCollection(this.statements); - } - - @Override - public Iterator iterator() { - return Collections.unmodifiableCollection(this.statements).iterator(); - } - - Map> getFactsByPredicate() { - return this.factsByPredicate; - } - -} \ No newline at end of file +package org.semanticweb.vlog4j.core.reasoner; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * A knowledge base with rules, facts, and declarations for loading data from + * further sources. This is a "syntactic" object in that it represents some + * information that is not relevant for the semantics of reasoning, but that is + * needed to ensure faithful re-serialisation of knowledge bases loaded from + * files (e.g., preserving order). + * + * @author Markus Kroetzsch + * + */ +public class KnowledgeBase implements Iterable { + + private final Set listeners = new HashSet<>(); + + /** + * all (canonical) file paths imported so far, used to prevent cyclic imports. + */ + private final Set importedFilePaths = new HashSet<>(); + + /** + * Auxiliary class to process {@link Statement}s when added to the knowledge + * base. Returns true if a statement was added successfully. + * + * @author Markus Kroetzsch + * + */ + private class AddStatementVisitor implements StatementVisitor { + @Override + public Boolean visit(final Fact statement) { + KnowledgeBase.this.addFact(statement); + return true; + } + + @Override + public Boolean visit(final Rule statement) { + return true; + } + + @Override + public Boolean visit(final DataSourceDeclaration statement) { + KnowledgeBase.this.dataSourceDeclarations.add(statement); + return true; + } + } + + private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); + + /** + * Auxiliary class to process {@link Statement}s when removed from the knowledge + * base. 
Returns true if a statement was removed successfully. + * + * @author Irina Dragoste + * + */ + private class RemoveStatementVisitor implements StatementVisitor { + + @Override + public Boolean visit(final Fact statement) { + KnowledgeBase.this.removeFact(statement); + return true; + } + + @Override + public Boolean visit(final Rule statement) { + return true; + } + + @Override + public Boolean visit(final DataSourceDeclaration statement) { + KnowledgeBase.this.dataSourceDeclarations.remove(statement); + return true; + } + } + + private final RemoveStatementVisitor removeStatementVisitor = new RemoveStatementVisitor(); + + private class ExtractStatementsVisitor implements StatementVisitor { + + final ArrayList extracted = new ArrayList<>(); + final Class ownType; + + ExtractStatementsVisitor(final Class type) { + this.ownType = type; + } + + ArrayList getExtractedStatements() { + return this.extracted; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final Fact statement) { + if (this.ownType.equals(Fact.class)) { + this.extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final Rule statement) { + if (this.ownType.equals(Rule.class)) { + this.extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final DataSourceDeclaration statement) { + if (this.ownType.equals(DataSourceDeclaration.class)) { + this.extracted.add((T) statement); + } + return null; + } + } + + /** + * The primary storage for the contents of the knowledge base. + */ + private final LinkedHashSet statements = new LinkedHashSet<>(); + +// TODO support prefixes +// /** +// * Known prefixes that can be used to pretty-print the contents of the knowledge +// * base. We try to preserve user-provided prefixes found in files when loading +// * data. +// */ +// PrefixDeclarations prefixDeclarations; + + /** + * Index structure that organises all facts by their predicate. + */ + private final Map> factsByPredicate = new HashMap<>(); + + /** + * Index structure that holds all data source declarations of this knowledge + * base. + */ + private final Set dataSourceDeclarations = new HashSet<>(); + + /** + * Registers a listener for changes on the knowledge base + * + * @param listener + */ + public void addListener(final KnowledgeBaseListener listener) { + this.listeners.add(listener); + } + + /** + * Unregisters given listener from changes on the knowledge base + * + * @param listener + */ + public void deleteListener(final KnowledgeBaseListener listener) { + this.listeners.remove(listener); + + } + + /** + * Adds a single statement to the knowledge base. + * + * @param statement the statement to be added + */ + public void addStatement(final Statement statement) { + if (this.doAddStatement(statement)) { + this.notifyListenersOnStatementAdded(statement); + } + } + + /** + * Adds a single statement to the knowledge base. + * + * @param statement the statement to be added + * @return true, if the knowledge base has changed. + */ + boolean doAddStatement(final Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { + this.statements.add(statement); + return true; + } + return false; + } + + /** + * Adds a collection of statements to the knowledge base. 
+ * + * @param statements the statements to be added + */ + public void addStatements(final Collection statements) { + final List addedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doAddStatement(statement)) { + addedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsAdded(addedStatements); + } + + /** + * Adds a list of statements to the knowledge base. + * + * @param statements the statements to be added + */ + public void addStatements(final Statement... statements) { + final List addedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doAddStatement(statement)) { + addedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsAdded(addedStatements); + } + + /** + * Removes a single statement from the knowledge base. + * + * @param statement the statement to remove + */ + public void removeStatement(final Statement statement) { + if (this.doRemoveStatement(statement)) { + this.notifyListenersOnStatementRemoved(statement); + } + } + + /** + * Removes a single statement from the knowledge base. + * + * @param statement the statement to remove + * @return true, if the knowledge base has changed. + */ + boolean doRemoveStatement(final Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + + if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { + this.statements.remove(statement); + return true; + } + return false; + } + + /** + * Removes a collection of statements to the knowledge base. + * + * @param statements the statements to remove + */ + public void removeStatements(final Collection statements) { + final List removedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doRemoveStatement(statement)) { + removedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsRemoved(removedStatements); + } + + /** + * Removes a list of statements from the knowledge base. + * + * @param statements the statements to remove + */ + public void removeStatements(final Statement... statements) { + final List removedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doRemoveStatement(statement)) { + removedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsRemoved(removedStatements); + } + + private void notifyListenersOnStatementAdded(final Statement addedStatement) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementAdded(addedStatement); + } + } + + private void notifyListenersOnStatementsAdded(final List addedStatements) { + if (!addedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsAdded(addedStatements); + } + } + } + + private void notifyListenersOnStatementRemoved(final Statement removedStatement) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementRemoved(removedStatement); + } + } + + private void notifyListenersOnStatementsRemoved(final List removedStatements) { + if (!removedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsRemoved(removedStatements); + } + } + } + + /** + * Get the list of all rules that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete rules. 
+ * + * @return list of {@link Rule}s + */ + public List getRules() { + return this.getStatementsByType(Rule.class); + } + + /** + * Get the list of all facts that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete facts. + * + * @return list of {@link Fact}s + */ + public List getFacts() { + return this.getStatementsByType(Fact.class); + } + + /** + * Get the list of all data source declarations that have been added to the + * knowledge base. The list is read-only and cannot be modified to add or delete + * facts. + * + * @return list of {@link DataSourceDeclaration}s + */ + public List getDataSourceDeclarations() { + return this.getStatementsByType(DataSourceDeclaration.class); + } + + List getStatementsByType(final Class type) { + final ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); + for (final Statement statement : this.statements) { + statement.accept(visitor); + } + return Collections.unmodifiableList(visitor.getExtractedStatements()); + } + + /** + * Add a single fact to the internal data structures. It is assumed that it has + * already been checked that this fact is not present yet. + * + * @param fact the fact to add + */ + void addFact(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); + this.factsByPredicate.get(predicate).add(fact); + } + + /** + * Removes a single fact from the internal data structure. It is assumed that it + * has already been checked that this fact is already present. + * + * @param fact the fact to remove + */ + void removeFact(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + final Set facts = this.factsByPredicate.get(predicate); + facts.remove(fact); + if (facts.isEmpty()) { + this.factsByPredicate.remove(predicate); + } + } + + /** + * Returns all {@link Statement}s of this knowledge base. + * + * The result can be iterated over and will return statements in the original + * order. The collection is read-only and cannot be modified to add or delete + * statements. + * + * @return a collection of statements + */ + public Collection getStatements() { + return Collections.unmodifiableCollection(this.statements); + } + + @Override + public Iterator iterator() { + return Collections.unmodifiableCollection(this.statements).iterator(); + } + + Map> getFactsByPredicate() { + return this.factsByPredicate; + } + + /** + * Import rules from a file. + * + * @param file the file to import + * @param parseFunction a function that transforms a {@link KnowledgeBase} using the {@link InputStream}. 
+ * + * @throws IOException when reading {@code file} fails + * @throws IllegalArgumentException when {@code file} is null or has already been imported + * @throws RuntimeException when parseFunction throws + * + */ + public KnowledgeBase importRulesFile(File file, + BiFunction parseFunction) + throws RuntimeException, IOException, IllegalArgumentException { + Validate.notNull(file, "file must not be null"); + + boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); + Validate.isTrue(isNewFile, "file \"" + file.getName() + "\" was already imported."); + + try (InputStream stream = new FileInputStream(file)) { + return parseFunction.apply(stream, this); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java index 4334cfb01..3be532ba7 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java @@ -21,8 +21,8 @@ */ import java.io.File; -import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; import java.util.List; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -39,25 +39,26 @@ * @author Maximilian Marx */ public class ImportFileDirectiveHandler implements DirectiveHandler { + @Override public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); - FileInputStream stream; - - // @todo(mx): should we try to prevent cyclic imports? 
- try { - stream = new FileInputStream(file); - } catch (IOException e) { - throw new ParsingException("Failed to read rules from \"" + file.getName() + "\"", e); - } - KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); - RuleParser.parseInto(knowledgeBase, stream, parserConfiguration); - - return knowledgeBase; + try { + return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { + try { + RuleParser.parseInto(kb, stream, parserConfiguration); + } catch (ParsingException e) { + throw new RuntimeException(e); + } + return kb; + }); + } catch (RuntimeException | IOException e) { + throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); + } } } From bdefe1d959a11bf07cee3450139cc449f9287fed Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 10 Feb 2020 19:04:42 +0100 Subject: [PATCH 0533/1003] Parser: support @import-relative statements respecting current @base --- .../parser/DefaultParserConfiguration.java | 8 ++- .../vlog4j/parser/DirectiveHandler.java | 14 ++++ .../semanticweb/vlog4j/parser/RuleParser.java | 36 ++++++++-- .../ImportFileRelativeDirectiveHandler.java | 67 +++++++++++++++++++ .../parser/javacc/JavaCCParserBase.java | 4 +- .../vlog4j/syntax/parser/RuleParserTest.java | 17 +++++ 6 files changed, 139 insertions(+), 7 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java index 383cca87c..625f6f87c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java @@ -25,6 +25,7 @@ import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.directives.ImportFileDirectiveHandler; +import org.semanticweb.vlog4j.parser.directives.ImportFileRelativeDirectiveHandler; /** * Default parser configuration. Registers default data sources. 
@@ -35,7 +36,7 @@ public class DefaultParserConfiguration extends ParserConfiguration { public DefaultParserConfiguration() { super(); registerDefaultDataSources(); - registerDirective("import", new ImportFileDirectiveHandler()); + registerDefaultDirectives(); } /** @@ -47,4 +48,9 @@ private void registerDefaultDataSources() { registerDataSource(Serializer.SPARQL_QUERY_RESULT_DATA_SOURCE, new SparqlQueryResultDataSourceDeclarationHandler()); } + + private void registerDefaultDirectives() { + registerDirective("import", new ImportFileDirectiveHandler()); + registerDirective("import-relative", new ImportFileRelativeDirectiveHandler()); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index 2c5cd6954..2a8c4a070 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -26,6 +26,7 @@ import java.util.List; import java.util.NoSuchElementException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; @@ -181,4 +182,17 @@ default ParserConfiguration getParserConfiguration(SubParserFactory subParserFac return subParser.getParserConfiguration(); } + + /** + * Obtain {@link PrefixDeclarations} from a {@link SubParserFactory}. + * + * @argument subParserFactory the SubParserFactory. + * + * @return the prefix declarations. + */ + default PrefixDeclarations getPrefixDeclarations(SubParserFactory subParserFactory) { + JavaCCParser subParser = subParserFactory.makeSubParser(""); + + return subParser.getPrefixDeclarations(); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 6b7a5eec2..d1395001e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -25,12 +25,12 @@ import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Entity; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -54,18 +54,46 @@ public class RuleParser { private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, - final ParserConfiguration parserConfiguration) throws ParsingException { + final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException { final JavaCCParser parser = new JavaCCParser(stream, encoding); + + if (baseIri != null) { + PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); + + try { + prefixDeclarations.setBase(baseIri); + } catch 
(PrefixDeclarationException e) { + throw new ParsingException("Invalid base IRI \"" + baseIri + "\"", e); + } + parser.setPrefixDeclarations(prefixDeclarations); + } + parser.setKnowledgeBase(knowledgeBase); parser.setParserConfiguration(parserConfiguration); doParse(parser); } + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, + final ParserConfiguration parserConfiguration) throws ParsingException { + parseInto(knowledgeBase, stream, encoding, parserConfiguration, null); + } + + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, + final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException { + parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING, parserConfiguration, baseIri); + } + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING, parserConfiguration); } + public static void parseInto(final KnowledgeBase knowledgeBase, final String input, + final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING, parserConfiguration, baseIri); + } + public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); @@ -217,8 +245,8 @@ public static Term parseTerm(final String input) throws ParsingException { return parseTerm(input, (ParserConfiguration) null); } - public static DataSourceDeclaration parseDataSourceDeclaration(final String input, ParserConfiguration parserConfiguration) - throws ParsingException { + public static DataSourceDeclaration parseDataSourceDeclaration(final String input, + ParserConfiguration parserConfiguration) throws ParsingException { return parseSyntaxFragment(input, RuleParser::parseAndExtractDatasourceDeclaration, "data source declaration", parserConfiguration); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java new file mode 100644 index 000000000..9036699b7 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -0,0 +1,67 @@ +package org.semanticweb.vlog4j.parser.directives; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.util.List; + +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.TermType; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.DirectiveArgument; +import org.semanticweb.vlog4j.parser.DirectiveHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing {@code @import-relative} statements. + * + * @author Maximilian Marx + */ +public class ImportFileRelativeDirectiveHandler implements DirectiveHandler { + @Override + public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) + throws ParsingException { + DirectiveHandler.validateNumberOfArguments(arguments, 1); + PrefixDeclarations prefixDeclarations = getPrefixDeclarations(subParserFactory); + File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); + KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); + ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); + + try { + return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { + try { + RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); + } catch (ParsingException e) { + throw new RuntimeException(e); + } + return kb; + }); + } catch (RuntimeException | IOException e) { + throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 7ebeb6e9d..2d6e9acaf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -310,11 +310,11 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } - void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { + public void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { this.prefixDeclarations = prefixDeclarations; } - PrefixDeclarations getPrefixDeclarations() { + public PrefixDeclarations getPrefixDeclarations() { return prefixDeclarations; } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 62e87cdc5..63ab37258 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -62,6 +62,7 @@ public class RuleParserTest { private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", c); private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); + private final PositiveLiteral fact3 = Expressions.makePositiveLiteral("http://example.org/p", abc); private final Conjunction body1 = 
Expressions.makeConjunction(atom1, atom2); private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); private final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); @@ -449,4 +450,20 @@ public void parse_importStatement_succeeds() throws ParsingException { List result = knowledgeBase.getFacts(); assertEquals(expected, result); } + + @Test + public void parse_relativeImportStatement_suceeds() throws ParsingException { + String input = "@base . @import-relative \"src/test/resources/facts.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact1, fact3); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test(expected = ParsingException.class) + public void parseInto_duplicateImportStatements_throws() throws ParsingException { + String input = "@import \"src/test/resources/facts.rls\" . "; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + RuleParser.parseInto(knowledgeBase, input); + } } From afa8e7e0861dda002e2e6701da6340ea3ecf3317 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 17:31:40 +0100 Subject: [PATCH 0534/1003] Core: Fix style --- .../implementation/InMemoryDataSource.java | 22 +- .../implementation/VLogKnowledgeBase.java | 604 +++---- .../reasoner/implementation/VLogReasoner.java | 1411 +++++++++-------- 3 files changed, 1017 insertions(+), 1020 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index 947b78078..c628cd023 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,7 +31,7 @@ * the object will directly accept tuples of constant names that are internally * stored in a form that can be passed to the reasoner directly, thereby saving * memory and loading time. - * + * * @author Markus Kroetzsch * */ @@ -47,11 +47,9 @@ public class InMemoryDataSource implements DataSource { * given capacity is the initial size of the space allocated. For best * efficiency, the actual number of facts should exactly correspond to this * capacity. - * - * @param arity - * the number of parameters in a fact from this source - * @param initialCapacity - * the planned number of facts + * + * @param arity the number of parameters in a fact from this source + * @param initialCapacity the planned number of facts */ public InMemoryDataSource(final int arity, final int initialCapacity) { this.capacity = initialCapacity; @@ -62,9 +60,8 @@ public InMemoryDataSource(final int arity, final int initialCapacity) { /** * Adds a fact to this data source. The number of constant names must agree with * the arity of this data source. 
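As a concrete illustration of the in-memory source described above, a minimal sketch; arity, capacity, and constant names are arbitrary example values:

```
import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource;

class InMemoryDataSourceSketch {
    static String[][] buildExampleSource() {
        // A binary data source with an initial capacity of three tuples.
        InMemoryDataSource source = new InMemoryDataSource(2, 3);
        source.addTuple("alice", "bob");   // exactly two constant names per tuple
        source.addTuple("bob", "carol");
        // Returns the tuples in the format expected by the VLog reasoner backend.
        return source.getData();
    }
}
```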
- * - * @param constantNames - * the string names of the constants in this fact + * + * @param constantNames the string names of the constants in this fact */ public void addTuple(final String... constantNames) { if (constantNames.length != this.arity) { @@ -85,7 +82,7 @@ public void addTuple(final String... constantNames) { /** * Returns the data stored in this data source, in the format expected by the * VLog reasoner backend. - * + * * @return the data */ public String[][] getData() { @@ -108,5 +105,4 @@ public String getSyntacticRepresentation() { } return sb.toString(); } - } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java index 232ecafe9..cc192cce2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -1,302 +1,302 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2020 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Formatter; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; - -/** - * Class for organizing a Knowledge Base using vLog-specific data structures. 
- * - * @author Irina Dragoste - * - */ -public class VLogKnowledgeBase { - - private final Map edbPredicates = new HashMap<>(); - private final Map aliasesForEdbPredicates = new HashMap<>(); - - private final Set aliasedEdbPredicates = new HashSet<>(); - - private final Set idbPredicates = new HashSet<>(); - - private final Map> directEdbFacts = new HashMap<>(); - - private final Set rules = new HashSet<>(); - - /** - * Package-protected constructor, that organizes given {@code knowledgeBase} in - * vLog-specific data structures. - * - * @param knowledgeBase - */ - VLogKnowledgeBase(final KnowledgeBase knowledgeBase) { - final LoadKbVisitor visitor = this.new LoadKbVisitor(); - visitor.clearIndexes(); - for (final Statement statement : knowledgeBase) { - statement.accept(visitor); - } - } - - boolean hasData() { - return !this.edbPredicates.isEmpty() && !this.aliasedEdbPredicates.isEmpty(); - } - - public boolean hasRules() { - return !this.rules.isEmpty(); - } - - Predicate getAlias(final Predicate predicate) { - if (this.edbPredicates.containsKey(predicate)) { - return predicate; - } else { - return this.aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); - } - } - - String getVLogDataSourcesConfigurationString() { - final StringBuilder configStringBuilder = new StringBuilder(); - final Formatter formatter = new Formatter(configStringBuilder); - int dataSourceIndex = 0; - - for (final Entry e : this.edbPredicates.entrySet()) { - dataSourceIndex = addDataSourceConfigurationString(e.getValue().getDataSource(), e.getKey(), - dataSourceIndex, formatter); - } - - for (final Entry e : this.aliasesForEdbPredicates.entrySet()) { - dataSourceIndex = addDataSourceConfigurationString(e.getKey().getDataSource(), e.getValue(), - dataSourceIndex, formatter); - } - - formatter.close(); - return configStringBuilder.toString(); - } - - int addDataSourceConfigurationString(final DataSource dataSource, final Predicate predicate, - final int dataSourceIndex, final Formatter formatter) { - int newDataSourceIndex = dataSourceIndex; - - if (dataSource != null) { - if (dataSource instanceof VLogDataSource) { - final VLogDataSource vLogDataSource = (VLogDataSource) dataSource; - final String configString = vLogDataSource.toConfigString(); - if (configString != null) { - formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); - newDataSourceIndex++; - } - } - } - - return newDataSourceIndex; - } - - Map getEdbPredicates() { - return this.edbPredicates; - } - - Map getAliasesForEdbPredicates() { - return this.aliasesForEdbPredicates; - } - - Map> getDirectEdbFacts() { - return this.directEdbFacts; - } - - Set getRules() { - return this.rules; - } - - /** - * - * Local visitor implementation for processing statements upon loading. Internal - * index structures are updated based on the statements that are detected. 
- * - * @author Markus Kroetzsch - */ - - class LoadKbVisitor implements StatementVisitor { - - public void clearIndexes() { - VLogKnowledgeBase.this.edbPredicates.clear(); - VLogKnowledgeBase.this.idbPredicates.clear(); - VLogKnowledgeBase.this.aliasedEdbPredicates.clear(); - VLogKnowledgeBase.this.aliasesForEdbPredicates.clear(); - VLogKnowledgeBase.this.directEdbFacts.clear(); - VLogKnowledgeBase.this.rules.clear(); - } - - @Override - public Void visit(final Fact fact) { - final Predicate predicate = fact.getPredicate(); - registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); - if (!VLogKnowledgeBase.this.directEdbFacts.containsKey(predicate)) { - final List facts = new ArrayList<>(); - facts.add(fact); - VLogKnowledgeBase.this.directEdbFacts.put(predicate, facts); - } else { - VLogKnowledgeBase.this.directEdbFacts.get(predicate).add(fact); - } - return null; - } - - @Override - public Void visit(final Rule statement) { - VLogKnowledgeBase.this.rules.add(statement); - for (final PositiveLiteral positiveLiteral : statement.getHead()) { - final Predicate predicate = positiveLiteral.getPredicate(); - if (!VLogKnowledgeBase.this.idbPredicates.contains(predicate)) { - if (VLogKnowledgeBase.this.edbPredicates.containsKey(predicate)) { - addEdbAlias(VLogKnowledgeBase.this.edbPredicates.get(predicate)); - VLogKnowledgeBase.this.edbPredicates.remove(predicate); - } - VLogKnowledgeBase.this.idbPredicates.add(predicate); - } - } - return null; - } - - @Override - public Void visit(final DataSourceDeclaration statement) { - registerEdbDeclaration(statement); - return null; - } - - void registerEdbDeclaration(final DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - if (VLogKnowledgeBase.this.idbPredicates.contains(predicate) - || VLogKnowledgeBase.this.aliasedEdbPredicates.contains(predicate)) { - if (!VLogKnowledgeBase.this.aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { - addEdbAlias(dataSourceDeclaration); - } - } else { - final DataSourceDeclaration currentMainDeclaration = VLogKnowledgeBase.this.edbPredicates.get(predicate); - if (currentMainDeclaration == null) { - VLogKnowledgeBase.this.edbPredicates.put(predicate, dataSourceDeclaration); - } else if (!currentMainDeclaration.equals(dataSourceDeclaration)) { - addEdbAlias(currentMainDeclaration); - addEdbAlias(dataSourceDeclaration); - VLogKnowledgeBase.this.edbPredicates.remove(predicate); - } // else: predicate already known to have local facts (only) - } - } - - void addEdbAlias(final DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - Predicate aliasPredicate; - if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { - aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); - } else { - aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), - predicate.getArity()); - } - VLogKnowledgeBase.this.aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); - VLogKnowledgeBase.this.aliasedEdbPredicates.add(predicate); - - final List terms = new ArrayList<>(); - for (int i = 1; i <= predicate.getArity(); i++) { - terms.add(new UniversalVariableImpl("X" + i)); - } - final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); - final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); - final Rule rule = new RuleImpl(new ConjunctionImpl<>(Arrays.asList(head)), - 
new ConjunctionImpl<>(Arrays.asList(body))); - VLogKnowledgeBase.this.rules.add(rule); - } - - } - - /** - * Dummy data source declaration for predicates for which we have explicit local - * facts in the input. - * - * @author Markus Kroetzsch - * - */ - class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { - - final Predicate predicate; - - public LocalFactsDataSourceDeclaration(Predicate predicate) { - this.predicate = predicate; - } - - @Override - public T accept(StatementVisitor statementVisitor) { - return statementVisitor.visit(this); - } - - @Override - public Predicate getPredicate() { - return this.predicate; - } - - @Override - public DataSource getDataSource() { - return null; - } - - @Override - public int hashCode() { - return this.predicate.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; - return this.predicate.equals(other.predicate); - } - } - -} +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2020 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; + +/** + * Class for organizing a Knowledge Base using vLog-specific data structures. 
+ * + * @author Irina Dragoste + * + */ +public class VLogKnowledgeBase { + + private final Map edbPredicates = new HashMap<>(); + private final Map aliasesForEdbPredicates = new HashMap<>(); + + private final Set aliasedEdbPredicates = new HashSet<>(); + + private final Set idbPredicates = new HashSet<>(); + + private final Map> directEdbFacts = new HashMap<>(); + + private final Set rules = new HashSet<>(); + + /** + * Package-protected constructor, that organizes given {@code knowledgeBase} in + * vLog-specific data structures. + * + * @param knowledgeBase + */ + VLogKnowledgeBase(final KnowledgeBase knowledgeBase) { + final LoadKbVisitor visitor = this.new LoadKbVisitor(); + visitor.clearIndexes(); + for (final Statement statement : knowledgeBase) { + statement.accept(visitor); + } + } + + boolean hasData() { + return !this.edbPredicates.isEmpty() && !this.aliasedEdbPredicates.isEmpty(); + } + + public boolean hasRules() { + return !this.rules.isEmpty(); + } + + Predicate getAlias(final Predicate predicate) { + if (this.edbPredicates.containsKey(predicate)) { + return predicate; + } else { + return this.aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + } + } + + String getVLogDataSourcesConfigurationString() { + final StringBuilder configStringBuilder = new StringBuilder(); + final Formatter formatter = new Formatter(configStringBuilder); + int dataSourceIndex = 0; + + for (final Entry e : this.edbPredicates.entrySet()) { + dataSourceIndex = addDataSourceConfigurationString(e.getValue().getDataSource(), e.getKey(), + dataSourceIndex, formatter); + } + + for (final Entry e : this.aliasesForEdbPredicates.entrySet()) { + dataSourceIndex = addDataSourceConfigurationString(e.getKey().getDataSource(), e.getValue(), + dataSourceIndex, formatter); + } + + formatter.close(); + return configStringBuilder.toString(); + } + + int addDataSourceConfigurationString(final DataSource dataSource, final Predicate predicate, + final int dataSourceIndex, final Formatter formatter) { + int newDataSourceIndex = dataSourceIndex; + + if (dataSource != null) { + if (dataSource instanceof VLogDataSource) { + final VLogDataSource vLogDataSource = (VLogDataSource) dataSource; + final String configString = vLogDataSource.toConfigString(); + if (configString != null) { + formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); + newDataSourceIndex++; + } + } + } + + return newDataSourceIndex; + } + + Map getEdbPredicates() { + return this.edbPredicates; + } + + Map getAliasesForEdbPredicates() { + return this.aliasesForEdbPredicates; + } + + Map> getDirectEdbFacts() { + return this.directEdbFacts; + } + + Set getRules() { + return this.rules; + } + + /** + * + * Local visitor implementation for processing statements upon loading. Internal + * index structures are updated based on the statements that are detected. 
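To make the aliasing performed by the visitor below concrete: if a predicate p/1 both has directly given facts and occurs in a rule head, the loader introduces an alias such as p##FACT and an auxiliary rule copying the alias into p. A schematic sketch, using the model classes already imported in this file (names are illustrative; real aliases use the ##FACT suffix or a data-source hash code):

```
// Schematic only: what addEdbAlias effectively constructs for a unary predicate p.
Predicate p = new PredicateImpl("p", 1);
Predicate pFactAlias = new PredicateImpl("p##FACT", 1);
Term x1 = new UniversalVariableImpl("X1");
PositiveLiteral head = new PositiveLiteralImpl(p, Arrays.asList(x1));
Literal body = new PositiveLiteralImpl(pFactAlias, Arrays.asList(x1));
// Rule p(X1) :- p##FACT(X1), so facts loaded under the alias reach p.
Rule copyRule = new RuleImpl(new ConjunctionImpl<>(Arrays.asList(head)),
        new ConjunctionImpl<>(Arrays.asList(body)));
```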
+ * + * @author Markus Kroetzsch + */ + + class LoadKbVisitor implements StatementVisitor { + + public void clearIndexes() { + VLogKnowledgeBase.this.edbPredicates.clear(); + VLogKnowledgeBase.this.idbPredicates.clear(); + VLogKnowledgeBase.this.aliasedEdbPredicates.clear(); + VLogKnowledgeBase.this.aliasesForEdbPredicates.clear(); + VLogKnowledgeBase.this.directEdbFacts.clear(); + VLogKnowledgeBase.this.rules.clear(); + } + + @Override + public Void visit(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); + if (!VLogKnowledgeBase.this.directEdbFacts.containsKey(predicate)) { + final List facts = new ArrayList<>(); + facts.add(fact); + VLogKnowledgeBase.this.directEdbFacts.put(predicate, facts); + } else { + VLogKnowledgeBase.this.directEdbFacts.get(predicate).add(fact); + } + return null; + } + + @Override + public Void visit(final Rule statement) { + VLogKnowledgeBase.this.rules.add(statement); + for (final PositiveLiteral positiveLiteral : statement.getHead()) { + final Predicate predicate = positiveLiteral.getPredicate(); + if (!VLogKnowledgeBase.this.idbPredicates.contains(predicate)) { + if (VLogKnowledgeBase.this.edbPredicates.containsKey(predicate)) { + addEdbAlias(VLogKnowledgeBase.this.edbPredicates.get(predicate)); + VLogKnowledgeBase.this.edbPredicates.remove(predicate); + } + VLogKnowledgeBase.this.idbPredicates.add(predicate); + } + } + return null; + } + + @Override + public Void visit(final DataSourceDeclaration statement) { + registerEdbDeclaration(statement); + return null; + } + + void registerEdbDeclaration(final DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + if (VLogKnowledgeBase.this.idbPredicates.contains(predicate) + || VLogKnowledgeBase.this.aliasedEdbPredicates.contains(predicate)) { + if (!VLogKnowledgeBase.this.aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { + addEdbAlias(dataSourceDeclaration); + } + } else { + final DataSourceDeclaration currentMainDeclaration = VLogKnowledgeBase.this.edbPredicates + .get(predicate); + if (currentMainDeclaration == null) { + VLogKnowledgeBase.this.edbPredicates.put(predicate, dataSourceDeclaration); + } else if (!currentMainDeclaration.equals(dataSourceDeclaration)) { + addEdbAlias(currentMainDeclaration); + addEdbAlias(dataSourceDeclaration); + VLogKnowledgeBase.this.edbPredicates.remove(predicate); + } // else: predicate already known to have local facts (only) + } + } + + void addEdbAlias(final DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + Predicate aliasPredicate; + if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { + aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); + } else { + aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), + predicate.getArity()); + } + VLogKnowledgeBase.this.aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); + VLogKnowledgeBase.this.aliasedEdbPredicates.add(predicate); + + final List terms = new ArrayList<>(); + for (int i = 1; i <= predicate.getArity(); i++) { + terms.add(new UniversalVariableImpl("X" + i)); + } + final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); + final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); + final Rule rule = new RuleImpl(new ConjunctionImpl<>(Arrays.asList(head)), + 
new ConjunctionImpl<>(Arrays.asList(body))); + VLogKnowledgeBase.this.rules.add(rule); + } + + } + + /** + * Dummy data source declaration for predicates for which we have explicit local + * facts in the input. + * + * @author Markus Kroetzsch + * + */ + class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { + + final Predicate predicate; + + public LocalFactsDataSourceDeclaration(Predicate predicate) { + this.predicate = predicate; + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public DataSource getDataSource() { + return null; + } + + @Override + public int hashCode() { + return this.predicate.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; + return this.predicate.equals(other.predicate); + } + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index e8db05863..b48b19f50 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,705 +1,706 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; -import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import 
karmaresearch.vlog.AlreadyStartedException; -import karmaresearch.vlog.EDBConfigurationException; -import karmaresearch.vlog.MaterializationException; -import karmaresearch.vlog.NonExistingPredicateException; -import karmaresearch.vlog.NotStartedException; -import karmaresearch.vlog.TermQueryResultIterator; -import karmaresearch.vlog.VLog; -import karmaresearch.vlog.VLog.CyclicCheckResult; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * Reasoner implementation using the VLog backend. - * - * - * - * @author Markus Kroetzsch - * - */ -public class VLogReasoner implements Reasoner { - private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); - - final KnowledgeBase knowledgeBase; - final VLog vLog = new VLog(); - - private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; - private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; - - private LogLevel internalLogLevel = LogLevel.WARNING; - private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; - private Integer timeoutAfterSeconds; - private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; - - /** - * Holds the state of the reasoning result. Has value {@code true} if reasoning - * has completed, {@code false} if it has been interrupted. - */ - private boolean reasoningCompleted; - - public VLogReasoner(KnowledgeBase knowledgeBase) { - super(); - this.knowledgeBase = knowledgeBase; - this.knowledgeBase.addListener(this); - - setLogLevel(this.internalLogLevel); - } - - @Override - public KnowledgeBase getKnowledgeBase() { - return this.knowledgeBase; - } - - @Override - public void setAlgorithm(final Algorithm algorithm) { - Validate.notNull(algorithm, "Algorithm cannot be null!"); - validateNotClosed(); - this.algorithm = algorithm; - } - - @Override - public Algorithm getAlgorithm() { - return this.algorithm; - } - - @Override - public void setReasoningTimeout(Integer seconds) { - validateNotClosed(); - if (seconds != null) { - Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); - } - this.timeoutAfterSeconds = seconds; - } - - @Override - public Integer getReasoningTimeout() { - return this.timeoutAfterSeconds; - } - - @Override - public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { - validateNotClosed(); - Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); - this.ruleRewriteStrategy = ruleRewritingStrategy; - } - - @Override - public RuleRewriteStrategy getRuleRewriteStrategy() { - return this.ruleRewriteStrategy; - } - - /* - * TODO Due to automatic predicate renaming, it can happen that an EDB predicate - * cannot be queried after loading unless reasoning has already been invoked - * (since the auxiliary rule that imports the EDB facts to the "real" predicate - * must be used). 
This issue could be weakened by rewriting queries to - * (single-source) EDB predicates internally when in such a state, - */ - // @Override - void load() throws IOException { - validateNotClosed(); - - switch (this.reasonerState) { - case KB_NOT_LOADED: - loadKnowledgeBase(); - break; - case KB_LOADED: - case MATERIALISED: - // do nothing, all KB is already loaded - break; - case KB_CHANGED: - resetReasoner(); - loadKnowledgeBase(); - default: - break; - } - } - - void loadKnowledgeBase() throws IOException { - LOGGER.info("Started loading knowledge base ..."); - - final VLogKnowledgeBase vLogKB = new VLogKnowledgeBase(this.knowledgeBase); - - if (!vLogKB.hasData()) { - LOGGER.warn("No data statements (facts or datasource declarations) have been provided."); - } - - // 1. vLog is initialized by loading VLog data sources - loadVLogDataSources(vLogKB); - - // 2. in-memory data is loaded - loadInMemoryDataSources(vLogKB); - validateDataSourcePredicateArities(vLogKB); - - loadFacts(vLogKB); - - // 3. rules are loaded - loadRules(vLogKB); - - this.reasonerState = ReasonerState.KB_LOADED; - - // if there are no rules, then materialisation state is complete - this.correctness = !vLogKB.hasRules() ? Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; - - LOGGER.info("Finished loading knowledge base."); - } - - void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { - try { - this.vLog.start(vLogKB.getVLogDataSourcesConfigurationString(), false); - } catch (final AlreadyStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); - } - } - - void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { - vLogKB.getEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(v.getDataSource(), k)); - - vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(k.getDataSource(), v)); - } - - void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { - if (dataSource instanceof InMemoryDataSource) { - - final InMemoryDataSource inMemoryDataSource = (InMemoryDataSource) dataSource; - try { - load(predicate, inMemoryDataSource); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); - } - } - } - - void load(final Predicate predicate, final InMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); - - this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); - - if (LOGGER.isDebugEnabled()) { - for (final String[] tuple : inMemoryDataSource.getData()) { - LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); - } - } - } - - /** - * Checks if the loaded external data sources do in fact contain data of the - * correct arity. 
- * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) - */ - void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws IncompatiblePredicateArityException { - - vLogKB.getEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(k, v.getDataSource())); - - vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(v, k.getDataSource())); - } - - /** - * Checks if the loaded external data for a given source does in fact contain - * data of the correct arity for the given predidate. - * - * @param predicate the predicate for which data is loaded - * @param dataSource the data source used - * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) - */ - void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) - throws IncompatiblePredicateArityException { - if (dataSource == null) { - return; - } - try { - final int dataSourcePredicateArity = this.vLog - .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); - if (dataSourcePredicateArity == -1) { - LOGGER.warn("Data source {} for predicate {} is empty! ", dataSource, predicate); - } else if (predicate.getArity() != dataSourcePredicateArity) { - throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } - } - - void loadFacts(final VLogKnowledgeBase vLogKB) { - final Map> directEdbFacts = vLogKB.getDirectEdbFacts(); - - directEdbFacts.forEach((k, v) -> { - try { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(vLogKB.getAlias(k)); - final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(v); - - this.vLog.addData(vLogPredicateName, vLogPredicateTuples); - - if (LOGGER.isDebugEnabled()) { - for (final String[] tuple : vLogPredicateTuples) { - LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); - } - } - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); - } - - }); - } - - void loadRules(final VLogKnowledgeBase vLogKB) { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(vLogKB.getRules()); - final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter - .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); - try { - this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); - if (LOGGER.isDebugEnabled()) { - for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { - LOGGER.debug("Loaded rule {}.", rule.toString()); - } - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } - } - - @Override - public boolean reason() throws IOException { - validateNotClosed(); - - switch (this.reasonerState) { - case KB_NOT_LOADED: - load(); - runChase(); - break; - case KB_LOADED: - runChase(); - break; - case KB_CHANGED: - resetReasoner(); - load(); - runChase(); - break; - case MATERIALISED: - runChase(); - break; - default: - break; - } - - return this.reasoningCompleted; - } - - private void runChase() { - LOGGER.info("Started materialisation of inferences ..."); - this.reasonerState = ReasonerState.MATERIALISED; - - final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; - try { - if (this.timeoutAfterSeconds == null) { - 
this.vLog.materialize(skolemChase); - this.reasoningCompleted = true; - } else { - this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final MaterializationException e) { - // FIXME: the message generated here is not guaranteed to be the correct - // interpretation of the exception that is caught - throw new RuntimeException( - "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", - e); - } - - if (this.reasoningCompleted) { - this.correctness = Correctness.SOUND_AND_COMPLETE; - LOGGER.info("Completed materialisation of inferences."); - } else { - this.correctness = Correctness.SOUND_BUT_INCOMPLETE; - LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); - } - } - - @Override - public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls) { - validateBeforeQuerying(query); - - final boolean filterBlanks = !includeNulls; - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - - TermQueryResultIterator stringQueryResultIterator; - try { - stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty!"); - return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); - } - - logWarningOnCorrectness(); - return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); - } - - @Override - public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls) { - validateBeforeQuerying(query); - - final boolean filterBlanks = !includeNulls; - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - - long result; - try { - result = this.vLog.querySize(vLogAtom, true, filterBlanks); - } catch (NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (NonExistingPredicateException e) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty!"); - result = 0; - } - logWarningOnCorrectness(); - return new QueryAnswerCountImpl(this.correctness, result); - } - - @Override - public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, - final boolean includeBlanks) throws IOException { - validateBeforeQuerying(query); - Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); - Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); - - final boolean filterBlanks = !includeBlanks; - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - try { - this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. 
Answers are therefore empty."); - } - - logWarningOnCorrectness(); - return this.correctness; - } - - private void validateBeforeQuerying(final PositiveLiteral query) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); - } - - @Override - public Correctness writeInferences(OutputStream stream) throws IOException { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, - "Obtaining inferences is not alowed before reasoner is loaded!"); - } - final Set toBeQueriedHeadPredicates = getKnolwedgeBasePredicates(); - - for (final Predicate predicate : toBeQueriedHeadPredicates) { - final PositiveLiteral queryAtom = getQueryAtom(predicate); - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); - try (final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false)) { - while (answers.hasNext()) { - final karmaresearch.vlog.Term[] vlogTerms = answers.next(); - final List termList = VLogToModelConverter.toTermList(vlogTerms); - stream.write(Serializer.getFactString(predicate, termList).getBytes()); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final NonExistingPredicateException e1) { - throw new RuntimeException("Inconsistent knowledge base state.", e1); - } - } - - logWarningOnCorrectness(); - return this.correctness; - } - - @Override - public Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { - try (OutputStream stream = new FileOutputStream(filePath)) { - return writeInferences(stream); - } - } - - private void logWarningOnCorrectness() { - if (this.correctness != Correctness.SOUND_AND_COMPLETE) { - LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); - } - } - - @Override - public void resetReasoner() { - validateNotClosed(); - this.reasonerState = ReasonerState.KB_NOT_LOADED; - this.vLog.stop(); - LOGGER.info("Reasoner has been reset. 
All inferences computed during reasoning have been discarded."); - } - - @Override - public void close() { - if (this.reasonerState == ReasonerState.CLOSED) { - LOGGER.info("Reasoner is already closed."); - } else { - this.reasonerState = ReasonerState.CLOSED; - this.knowledgeBase.deleteListener(this); - this.vLog.stop(); - LOGGER.info("Reasoner closed."); - } - } - - @Override - public void setLogLevel(LogLevel logLevel) { - validateNotClosed(); - Validate.notNull(logLevel, "Log level cannot be null!"); - this.internalLogLevel = logLevel; - this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); - } - - @Override - public LogLevel getLogLevel() { - return this.internalLogLevel; - } - - @Override - public void setLogFile(String filePath) { - validateNotClosed(); - this.vLog.setLogFile(filePath); - } - - @Override - public boolean isJA() { - return checkAcyclicity(AcyclicityNotion.JA); - } - - @Override - public boolean isRJA() { - return checkAcyclicity(AcyclicityNotion.RJA); - } - - @Override - public boolean isMFA() { - return checkAcyclicity(AcyclicityNotion.MFA); - } - - @Override - public boolean isRMFA() { - return checkAcyclicity(AcyclicityNotion.RMFA); - } - - @Override - public boolean isMFC() { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, - "Checking rules acyclicity is not allowed before loading!"); - } - - CyclicCheckResult checkCyclic; - try { - checkCyclic = this.vLog.checkCyclic("MFC"); - } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible - } - return checkCyclic.equals(CyclicCheckResult.CYCLIC); - } - - @Override - public CyclicityResult checkForCycles() { - final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); - if (acyclic) { - return CyclicityResult.ACYCLIC; - } else { - final boolean cyclic = isMFC(); - if (cyclic) { - return CyclicityResult.CYCLIC; - } - return CyclicityResult.UNDETERMINED; - } - } - - @Override - public void onStatementsAdded(List statementsAdded) { - // TODO more elaborate materialisation state handling - - updateReasonerToKnowledgeBaseChanged(); - - // updateCorrectnessOnStatementsAdded(statementsAdded); - updateCorrectnessOnStatementsAdded(); - } - - @Override - public void onStatementAdded(Statement statementAdded) { - // TODO more elaborate materialisation state handling - - updateReasonerToKnowledgeBaseChanged(); - - // updateCorrectnessOnStatementAdded(statementAdded); - updateCorrectnessOnStatementsAdded(); - } - - @Override - public void onStatementRemoved(Statement statementRemoved) { - updateReasonerToKnowledgeBaseChanged(); - updateCorrectnessOnStatementsRemoved(); - } - - @Override - public void onStatementsRemoved(List statementsRemoved) { - updateReasonerToKnowledgeBaseChanged(); - updateCorrectnessOnStatementsRemoved(); - } - - Set getKnolwedgeBasePredicates() { - final Set toBeQueriedHeadPredicates = new HashSet<>(); - for (final Rule rule : this.knowledgeBase.getRules()) { - for (final Literal literal : rule.getHead()) { - toBeQueriedHeadPredicates.add(literal.getPredicate()); - } - } - for (final DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) { - toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); - } - for (final Fact fact : this.knowledgeBase.getFacts()) { - toBeQueriedHeadPredicates.add(fact.getPredicate()); - } - return toBeQueriedHeadPredicates; - } - - private 
PositiveLiteral getQueryAtom(final Predicate predicate) { - final List toBeGroundedVariables = new ArrayList<>(predicate.getArity()); - for (int i = 0; i < predicate.getArity(); i++) { - toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); - } - return Expressions.makePositiveLiteral(predicate, toBeGroundedVariables); - } - - private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - try { - load(); - } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 - throw new RuntimeException(e); - } - } - - CyclicCheckResult checkCyclic; - try { - checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); - } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible - } - return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); - } - - private void updateReasonerToKnowledgeBaseChanged() { - if (this.reasonerState.equals(ReasonerState.KB_LOADED) - || this.reasonerState.equals(ReasonerState.MATERIALISED)) { - - this.reasonerState = ReasonerState.KB_CHANGED; - } - } - - private void updateCorrectnessOnStatementsAdded() { - if (this.reasonerState == ReasonerState.KB_CHANGED) { - // TODO refine - this.correctness = Correctness.INCORRECT; - } - } - - private void updateCorrectnessOnStatementsRemoved() { - if (this.reasonerState == ReasonerState.KB_CHANGED) { - // TODO refine - this.correctness = Correctness.INCORRECT; - } - } - - /** - * Check if reasoner is closed and throw an exception if it is. - * - * @throws ReasonerStateException - */ - void validateNotClosed() throws ReasonerStateException { - if (this.reasonerState == ReasonerState.CLOSED) { - LOGGER.error("Invalid operation requested on a closed reasoner object!"); - throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); - } - } - - ReasonerState getReasonerState() { - return this.reasonerState; - } - - void setReasonerState(ReasonerState reasonerState) { - this.reasonerState = reasonerState; - } -} +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; +import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.Correctness; +import 
org.semanticweb.vlog4j.core.reasoner.CyclicityResult; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.ReasonerState; +import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import karmaresearch.vlog.AlreadyStartedException; +import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.MaterializationException; +import karmaresearch.vlog.NonExistingPredicateException; +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.TermQueryResultIterator; +import karmaresearch.vlog.VLog; +import karmaresearch.vlog.VLog.CyclicCheckResult; + +/* + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Reasoner implementation using the VLog backend. + * + * + * + * @author Markus Kroetzsch + * + */ +public class VLogReasoner implements Reasoner { + private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); + + final KnowledgeBase knowledgeBase; + final VLog vLog = new VLog(); + + private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; + private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; + + private LogLevel internalLogLevel = LogLevel.WARNING; + private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; + private Integer timeoutAfterSeconds; + private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; + + /** + * Holds the state of the reasoning result. Has value {@code true} if reasoning + * has completed, {@code false} if it has been interrupted. 
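A short usage sketch for the completion flag documented above; the knowledge base is assumed to be populated elsewhere, and the timeout is an arbitrary example value:

```
// Sketch only: knowledgeBase is assumed to be given.
void materialiseWithTimeout(KnowledgeBase knowledgeBase) throws IOException {
    final VLogReasoner reasoner = new VLogReasoner(knowledgeBase);
    try {
        reasoner.setReasoningTimeout(10);      // seconds; arbitrary example value
        final boolean completed = reasoner.reason();
        if (!completed) {
            // The chase was interrupted: answers are sound but possibly incomplete.
        }
    } finally {
        reasoner.close();
    }
}
```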
+ */ + private boolean reasoningCompleted; + + public VLogReasoner(KnowledgeBase knowledgeBase) { + super(); + this.knowledgeBase = knowledgeBase; + this.knowledgeBase.addListener(this); + + setLogLevel(this.internalLogLevel); + } + + @Override + public KnowledgeBase getKnowledgeBase() { + return this.knowledgeBase; + } + + @Override + public void setAlgorithm(final Algorithm algorithm) { + Validate.notNull(algorithm, "Algorithm cannot be null!"); + validateNotClosed(); + this.algorithm = algorithm; + } + + @Override + public Algorithm getAlgorithm() { + return this.algorithm; + } + + @Override + public void setReasoningTimeout(Integer seconds) { + validateNotClosed(); + if (seconds != null) { + Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); + } + this.timeoutAfterSeconds = seconds; + } + + @Override + public Integer getReasoningTimeout() { + return this.timeoutAfterSeconds; + } + + @Override + public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { + validateNotClosed(); + Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); + this.ruleRewriteStrategy = ruleRewritingStrategy; + } + + @Override + public RuleRewriteStrategy getRuleRewriteStrategy() { + return this.ruleRewriteStrategy; + } + + /* + * TODO Due to automatic predicate renaming, it can happen that an EDB predicate + * cannot be queried after loading unless reasoning has already been invoked + * (since the auxiliary rule that imports the EDB facts to the "real" predicate + * must be used). This issue could be weakened by rewriting queries to + * (single-source) EDB predicates internally when in such a state, + */ + // @Override + void load() throws IOException { + validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + loadKnowledgeBase(); + break; + case KB_LOADED: + case MATERIALISED: + // do nothing, all KB is already loaded + break; + case KB_CHANGED: + resetReasoner(); + loadKnowledgeBase(); + default: + break; + } + } + + void loadKnowledgeBase() throws IOException { + LOGGER.info("Started loading knowledge base ..."); + + final VLogKnowledgeBase vLogKB = new VLogKnowledgeBase(this.knowledgeBase); + + if (!vLogKB.hasData()) { + LOGGER.warn("No data statements (facts or datasource declarations) have been provided."); + } + + // 1. vLog is initialized by loading VLog data sources + loadVLogDataSources(vLogKB); + + // 2. in-memory data is loaded + loadInMemoryDataSources(vLogKB); + validateDataSourcePredicateArities(vLogKB); + + loadFacts(vLogKB); + + // 3. rules are loaded + loadRules(vLogKB); + + this.reasonerState = ReasonerState.KB_LOADED; + + // if there are no rules, then materialisation state is complete + this.correctness = !vLogKB.hasRules() ? 
Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; + + LOGGER.info("Finished loading knowledge base."); + } + + void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { + try { + this.vLog.start(vLogKB.getVLogDataSourcesConfigurationString(), false); + } catch (final AlreadyStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration.", e); + } + } + + void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { + vLogKB.getEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(v.getDataSource(), k)); + + vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(k.getDataSource(), v)); + } + + void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { + if (dataSource instanceof InMemoryDataSource) { + + final InMemoryDataSource inMemoryDataSource = (InMemoryDataSource) dataSource; + try { + load(predicate, inMemoryDataSource); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration!", e); + } + } + } + + void load(final Predicate predicate, final InMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); + + this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); + + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : inMemoryDataSource.getData()) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); + } + } + } + + /** + * Checks if the loaded external data sources do in fact contain data of the + * correct arity. + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ + void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws IncompatiblePredicateArityException { + + vLogKB.getEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(k, v.getDataSource())); + + vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(v, k.getDataSource())); + } + + /** + * Checks if the loaded external data for a given source does in fact contain + * data of the correct arity for the given predidate. + * + * @param predicate the predicate for which data is loaded + * @param dataSource the data source used + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ + void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) + throws IncompatiblePredicateArityException { + if (dataSource == null) { + return; + } + try { + final int dataSourcePredicateArity = this.vLog + .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); + if (dataSourcePredicateArity == -1) { + LOGGER.warn("Data source {} for predicate {} is empty! 
", dataSource, predicate); + } else if (predicate.getArity() != dataSourcePredicateArity) { + throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } + } + + void loadFacts(final VLogKnowledgeBase vLogKB) { + final Map> directEdbFacts = vLogKB.getDirectEdbFacts(); + + directEdbFacts.forEach((k, v) -> { + try { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(vLogKB.getAlias(k)); + final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(v); + + this.vLog.addData(vLogPredicateName, vLogPredicateTuples); + + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : vLogPredicateTuples) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); + } + } + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration!", e); + } + + }); + } + + void loadRules(final VLogKnowledgeBase vLogKB) { + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(vLogKB.getRules()); + final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter + .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); + try { + this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); + if (LOGGER.isDebugEnabled()) { + for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { + LOGGER.debug("Loaded rule {}.", rule.toString()); + } + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } + } + + @Override + public boolean reason() throws IOException { + validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + load(); + runChase(); + break; + case KB_LOADED: + runChase(); + break; + case KB_CHANGED: + resetReasoner(); + load(); + runChase(); + break; + case MATERIALISED: + runChase(); + break; + default: + break; + } + + return this.reasoningCompleted; + } + + private void runChase() { + LOGGER.info("Started materialisation of inferences ..."); + this.reasonerState = ReasonerState.MATERIALISED; + + final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; + try { + if (this.timeoutAfterSeconds == null) { + this.vLog.materialize(skolemChase); + this.reasoningCompleted = true; + } else { + this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final MaterializationException e) { + // FIXME: the message generated here is not guaranteed to be the correct + // interpretation of the exception that is caught + throw new RuntimeException( + "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", + e); + } + + if (this.reasoningCompleted) { + this.correctness = Correctness.SOUND_AND_COMPLETE; + LOGGER.info("Completed materialisation of inferences."); + } else { + this.correctness = Correctness.SOUND_BUT_INCOMPLETE; + LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); + } + } + + @Override + public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls) { + validateBeforeQuerying(query); + + final boolean filterBlanks = !includeNulls; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + + 
TermQueryResultIterator stringQueryResultIterator; + try { + stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. Answer must be empty!"); + return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); + } + + logWarningOnCorrectness(); + return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); + } + + @Override + public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls) { + validateBeforeQuerying(query); + + final boolean filterBlanks = !includeNulls; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + + long result; + try { + result = this.vLog.querySize(vLogAtom, true, filterBlanks); + } catch (NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (NonExistingPredicateException e) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. Answer must be empty!"); + result = 0; + } + logWarningOnCorrectness(); + return new QueryAnswerCountImpl(this.correctness, result); + } + + @Override + public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, + final boolean includeBlanks) throws IOException { + validateBeforeQuerying(query); + + Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); + Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); + + final boolean filterBlanks = !includeBlanks; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + try { + this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } catch (final NonExistingPredicateException e1) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. 
Answers are therefore empty."); + } + + logWarningOnCorrectness(); + return this.correctness; + } + + private void validateBeforeQuerying(final PositiveLiteral query) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } + Validate.notNull(query, "Query atom must not be null!"); + } + + @Override + public Correctness writeInferences(OutputStream stream) throws IOException { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, + "Obtaining inferences is not alowed before reasoner is loaded!"); + } + final Set toBeQueriedHeadPredicates = getKnolwedgeBasePredicates(); + + for (final Predicate predicate : toBeQueriedHeadPredicates) { + final PositiveLiteral queryAtom = getQueryAtom(predicate); + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); + try (final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false)) { + while (answers.hasNext()) { + final karmaresearch.vlog.Term[] vlogTerms = answers.next(); + final List termList = VLogToModelConverter.toTermList(vlogTerms); + stream.write(Serializer.getFactString(predicate, termList).getBytes()); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + throw new RuntimeException("Inconsistent knowledge base state.", e1); + } + } + + logWarningOnCorrectness(); + return this.correctness; + } + + @Override + public Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { + try (OutputStream stream = new FileOutputStream(filePath)) { + return writeInferences(stream); + } + } + + private void logWarningOnCorrectness() { + if (this.correctness != Correctness.SOUND_AND_COMPLETE) { + LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); + } + } + + @Override + public void resetReasoner() { + validateNotClosed(); + this.reasonerState = ReasonerState.KB_NOT_LOADED; + this.vLog.stop(); + LOGGER.info("Reasoner has been reset. 
All inferences computed during reasoning have been discarded."); + } + + @Override + public void close() { + if (this.reasonerState == ReasonerState.CLOSED) { + LOGGER.info("Reasoner is already closed."); + } else { + this.reasonerState = ReasonerState.CLOSED; + this.knowledgeBase.deleteListener(this); + this.vLog.stop(); + LOGGER.info("Reasoner closed."); + } + } + + @Override + public void setLogLevel(LogLevel logLevel) { + validateNotClosed(); + Validate.notNull(logLevel, "Log level cannot be null!"); + this.internalLogLevel = logLevel; + this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); + } + + @Override + public LogLevel getLogLevel() { + return this.internalLogLevel; + } + + @Override + public void setLogFile(String filePath) { + validateNotClosed(); + this.vLog.setLogFile(filePath); + } + + @Override + public boolean isJA() { + return checkAcyclicity(AcyclicityNotion.JA); + } + + @Override + public boolean isRJA() { + return checkAcyclicity(AcyclicityNotion.RJA); + } + + @Override + public boolean isMFA() { + return checkAcyclicity(AcyclicityNotion.MFA); + } + + @Override + public boolean isRMFA() { + return checkAcyclicity(AcyclicityNotion.RMFA); + } + + @Override + public boolean isMFC() { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, + "Checking rules acyclicity is not allowed before loading!"); + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic("MFC"); + } catch (final NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.CYCLIC); + } + + @Override + public CyclicityResult checkForCycles() { + final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); + if (acyclic) { + return CyclicityResult.ACYCLIC; + } else { + final boolean cyclic = isMFC(); + if (cyclic) { + return CyclicityResult.CYCLIC; + } + return CyclicityResult.UNDETERMINED; + } + } + + @Override + public void onStatementsAdded(List statementsAdded) { + // TODO more elaborate materialisation state handling + + updateReasonerToKnowledgeBaseChanged(); + + // updateCorrectnessOnStatementsAdded(statementsAdded); + updateCorrectnessOnStatementsAdded(); + } + + @Override + public void onStatementAdded(Statement statementAdded) { + // TODO more elaborate materialisation state handling + + updateReasonerToKnowledgeBaseChanged(); + + // updateCorrectnessOnStatementAdded(statementAdded); + updateCorrectnessOnStatementsAdded(); + } + + @Override + public void onStatementRemoved(Statement statementRemoved) { + updateReasonerToKnowledgeBaseChanged(); + updateCorrectnessOnStatementsRemoved(); + } + + @Override + public void onStatementsRemoved(List statementsRemoved) { + updateReasonerToKnowledgeBaseChanged(); + updateCorrectnessOnStatementsRemoved(); + } + + Set getKnolwedgeBasePredicates() { + final Set toBeQueriedHeadPredicates = new HashSet<>(); + for (final Rule rule : this.knowledgeBase.getRules()) { + for (final Literal literal : rule.getHead()) { + toBeQueriedHeadPredicates.add(literal.getPredicate()); + } + } + for (final DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) { + toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); + } + for (final Fact fact : this.knowledgeBase.getFacts()) { + toBeQueriedHeadPredicates.add(fact.getPredicate()); + } + return toBeQueriedHeadPredicates; + } + + private 
PositiveLiteral getQueryAtom(final Predicate predicate) { + final List toBeGroundedVariables = new ArrayList<>(predicate.getArity()); + for (int i = 0; i < predicate.getArity(); i++) { + toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); + } + return Expressions.makePositiveLiteral(predicate, toBeGroundedVariables); + } + + private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + try { + load(); + } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + throw new RuntimeException(e); + } + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); + } catch (final NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); + } + + private void updateReasonerToKnowledgeBaseChanged() { + if (this.reasonerState.equals(ReasonerState.KB_LOADED) + || this.reasonerState.equals(ReasonerState.MATERIALISED)) { + + this.reasonerState = ReasonerState.KB_CHANGED; + } + } + + private void updateCorrectnessOnStatementsAdded() { + if (this.reasonerState == ReasonerState.KB_CHANGED) { + // TODO refine + this.correctness = Correctness.INCORRECT; + } + } + + private void updateCorrectnessOnStatementsRemoved() { + if (this.reasonerState == ReasonerState.KB_CHANGED) { + // TODO refine + this.correctness = Correctness.INCORRECT; + } + } + + /** + * Check if reasoner is closed and throw an exception if it is. + * + * @throws ReasonerStateException + */ + void validateNotClosed() throws ReasonerStateException { + if (this.reasonerState == ReasonerState.CLOSED) { + LOGGER.error("Invalid operation requested on a closed reasoner object!"); + throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); + } + } + + ReasonerState getReasonerState() { + return this.reasonerState; + } + + void setReasonerState(ReasonerState reasonerState) { + this.reasonerState = reasonerState; + } +} From bba8681aa401f5691df7b5fd75aecd8158f9aded Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 10 Feb 2020 20:45:10 +0100 Subject: [PATCH 0535/1003] Parser: Track prefix declarations as part of the knowledge base --- .../core/model/api/PrefixDeclarations.java | 13 +- .../MergeablePrefixDeclarations.java | 178 ++++++++++++++++++ .../vlog4j/core/reasoner/KnowledgeBase.java | 46 +++-- .../parser/LocalPrefixDeclarations.java | 12 +- .../semanticweb/vlog4j/parser/RuleParser.java | 4 +- .../ImportFileRelativeDirectiveHandler.java | 2 +- 6 files changed, 228 insertions(+), 27 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java index 857d81160..942c10a60 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,10 +25,10 @@ /** * Registry that manages prefixes and base namespace declarations as used for * parsing and serialising inputs. - * + * * @author Markus Kroetzsch */ -public interface PrefixDeclarations { +public interface PrefixDeclarations extends Iterable { static final String XSD = "http://www.w3.org/2001/XMLSchema#"; static final String XSD_STRING = "http://www.w3.org/2001/XMLSchema#string"; @@ -42,7 +42,7 @@ public interface PrefixDeclarations { /** * Returns the relevant base namespace. This should always return a result, * possibly using a local default value if no base was declared. - * + * * @return string of an absolute base IRI */ String getBase(); @@ -50,7 +50,7 @@ public interface PrefixDeclarations { /** * Sets the base namespace to the given value. This should only be done once, * and not after the base namespace was assumed to be an implicit default value. - * + * * @param base the new base namespace * @throws PrefixDeclarationException if base was already defined */ @@ -63,5 +63,4 @@ public interface PrefixDeclarations { String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException; String absolutize(String prefixedName) throws PrefixDeclarationException; - } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java new file mode 100644 index 000000000..f586fa9ff --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -0,0 +1,178 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.net.URI; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; + +/** + * Implementation of {@link PrefixDeclarations} that is suitable for + * incrementally parsing from multiple sources. When trying to merge in + * conflicting prefix declarations, a fresh non-conflicting prefix is generated + * instead. 
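+ * Generated prefix names start with {@code vlog4j_generated_} (see
+ * {@code setPrefix(String, String)} and {@code setBase(String)}), so the
+ * namespace bound to the original prefix remains declared.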
+ * + * @author Maximilian Marx + */ +final public class MergeablePrefixDeclarations implements PrefixDeclarations { + private Map prefixes = new HashMap<>(); + + private String baseUri = EMPTY_BASE_PREFIX; + private long nextIndex = 0; + + private static final String EMPTY_BASE_PREFIX = ""; + private static final String GENERATED_PREFIX_PREFIX = "vlog4j_generated_"; + + public MergeablePrefixDeclarations() { + } + + public MergeablePrefixDeclarations(final PrefixDeclarations prefixDeclarations) { + super(); + mergePrefixDeclarations(prefixDeclarations); + } + + @Override + public String getBase() { + return baseUri; + } + + @Override + public void setBase(String base) { + if (base != this.baseUri && this.baseUri != EMPTY_BASE_PREFIX) { + prefixes.put(getFreshPrefix(), this.baseUri); + } + + this.baseUri = base; + } + + @Override + public String getPrefix(String prefix) throws PrefixDeclarationException { + if (!prefixes.containsKey(prefix)) { + throw new PrefixDeclarationException("Prefix \"" + prefix + "\" cannot be resolved (not declared yet)."); + } + return prefixes.get(prefix); + } + + @Override + public void setPrefix(String prefix, String iri) { + String prefixName = prefixes.containsKey(prefix) ? getFreshPrefix() : prefix; + prefixes.put(prefixName, iri); + } + + @Override + public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { + int colon = prefixedName.indexOf(":"); + String prefix = prefixedName.substring(0, colon + 1); + String suffix = prefixedName.substring(colon + 1); + + return getPrefix(prefix) + suffix; + } + + /** + * Turn an absolute Iri into a (possibly) prefixed name. Dual to + * {@link resolvePrefixedName}. + * + * @param iri an absolute Iri to abbreviate. + * + * @return an abbreviated form of {@code iri} if an appropriate prefix is known, + * or {@code iri}. + */ + public String unresolveAbsoluteIri(String iri) { + Map matches = new HashMap<>(); + + prefixes.forEach((prefixName, baseIri) -> { + if (iri.startsWith(baseIri)) { + matches.put(iri.replaceFirst(baseIri, prefixName), baseIri.length()); + } + }); + + List matchesByLength = new ArrayList<>(matches.keySet()); + matchesByLength.sort((left, right) -> { + // inverse order, so we get the longest match first + return matches.get(right).compareTo(matches.get(left)); + }); + + if (matchesByLength.size() > 0) { + return matchesByLength.get(0); + } else { + // no matching prefix + return iri; + } + } + + @Override + public String absolutize(String iri) throws PrefixDeclarationException { + URI relative = URI.create(iri); + + if (relative.isAbsolute()) { + return iri; + } else { + return getBase() + iri; + } + } + + @Override + public Iterator iterator() { + return this.prefixes.keySet().iterator(); + } + + /** + * Merge another set of prefix declarations. + * + * @param other the set of prefix declarations to merge. Conflicting prefixes + * will be renamed. + * + * @return this + */ + public MergeablePrefixDeclarations mergePrefixDeclarations(final PrefixDeclarations other) { + for (String prefixName : other) { + String iri; + try { + iri = other.getPrefix(prefixName); + } catch (PrefixDeclarationException e) { + // this shouldn't throw, since we already know that prefix is defined. 
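+				// if it is thrown anyway, the internal prefix map is inconsistent, so fail fast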
+ throw new RuntimeException(e); + } + + this.prefixes.put(prefixName, iri); + } + + return this; + } + + private String getFreshPrefix() { + for (long idx = nextIndex; true; ++idx) { + String freshPrefix = GENERATED_PREFIX_PREFIX + idx; + + if (!prefixes.containsKey(freshPrefix)) { + this.nextIndex = idx + 1; + return freshPrefix; + } + } + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 3198deafa..06915b4b1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -25,6 +25,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; /*- * #%L @@ -168,13 +169,12 @@ public Void visit(final DataSourceDeclaration statement) { */ private final LinkedHashSet statements = new LinkedHashSet<>(); -// TODO support prefixes -// /** -// * Known prefixes that can be used to pretty-print the contents of the knowledge -// * base. We try to preserve user-provided prefixes found in files when loading -// * data. -// */ -// PrefixDeclarations prefixDeclarations; + /** + * Known prefixes that can be used to pretty-print the contents of the knowledge + * base. We try to preserve user-provided prefixes found in files when loading + * data. + */ + private MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(); /** * Index structure that organises all facts by their predicate. @@ -446,17 +446,19 @@ Map> getFactsByPredicate() { /** * Import rules from a file. * - * @param file the file to import - * @param parseFunction a function that transforms a {@link KnowledgeBase} using the {@link InputStream}. + * @param file the file to import + * @param parseFunction a function that transforms a {@link KnowledgeBase} using + * the {@link InputStream}. * - * @throws IOException when reading {@code file} fails - * @throws IllegalArgumentException when {@code file} is null or has already been imported - * @throws RuntimeException when parseFunction throws + * @throws IOException when reading {@code file} fails + * @throws IllegalArgumentException when {@code file} is null or has already + * been imported + * @throws RuntimeException when parseFunction throws * + * @return this */ - public KnowledgeBase importRulesFile(File file, - BiFunction parseFunction) - throws RuntimeException, IOException, IllegalArgumentException { + public KnowledgeBase importRulesFile(File file, BiFunction parseFunction) + throws RuntimeException, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); @@ -466,4 +468,18 @@ public KnowledgeBase importRulesFile(File file, return parseFunction.apply(stream, this); } } + + /** + * Merge {@link PrefixDeclarations} into this knowledge base. 
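+	 * Conflict handling is delegated to the underlying
+	 * {@link MergeablePrefixDeclarations}.
+	 *
+	 * @param prefixDeclarations the prefix declarations to merge
+	 * @return this knowledge base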
+ * + */ + public KnowledgeBase mergePrefixDeclarations(PrefixDeclarations prefixDeclarations) { + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + + return this; + } + + public PrefixDeclarations getPrefixDeclarations() { + return this.prefixDeclarations; + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java index bf6c10f36..4f325de6e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +22,7 @@ import java.net.URI; import java.util.HashMap; +import java.util.Iterator; import java.util.Map; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; @@ -31,7 +32,7 @@ * Implementation of {@link PrefixDeclarations} that is used when parsing data * from a single source. In this case, attempts to re-declare prefixes or the * base IRI will lead to errors. - * + * * @author Markus Kroetzsch * */ @@ -92,4 +93,9 @@ public String absolutize(String iri) throws PrefixDeclarationException { } } + @Override + public Iterator iterator() { + return this.prefixes.keySet().iterator(); + } + } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index d1395001e..93e81cb9f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -262,7 +262,9 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException LOGGER.error("Exception while parsing Knowledge Base!", e); throw new ParsingException("Exception while parsing Knowledge Base.", e); } - return parser.getKnowledgeBase(); + + KnowledgeBase knowledgeBase = parser.getKnowledgeBase(); + return knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarations()); } protected static DataSourceDeclaration parseAndExtractDatasourceDeclaration(final JavaCCParser parser) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java index 9036699b7..ca363c251 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -54,7 +54,7 @@ public KnowledgeBase handleDirective(List arguments, final Su try { return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { try { - RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); + RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); } catch (ParsingException e) { throw new 
RuntimeException(e); } From 22aa3fc11dbd4e8e67f4a30fbb457329b6333d09 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 13:59:36 +0100 Subject: [PATCH 0536/1003] Core: Add license headers to VLogKnowledgeBase --- .../vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java | 1 - 1 file changed, 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java index cc192cce2..e121399a9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -150,7 +150,6 @@ Set getRules() { } /** - * * Local visitor implementation for processing statements upon loading. Internal * index structures are updated based on the statements that are detected. * From cd2407d2336b1d3f100e43e7c248b5791216b37d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 14:08:35 +0100 Subject: [PATCH 0537/1003] Graal: Fix Javadoc Graal: Fix style --- .../vlog4j/graal/GraalToVLog4JModelConverter.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java index fe44ed0b9..5b93b173a 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java +++ b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -93,8 +93,8 @@ public static Fact convertAtomToFact(final fr.lirmm.graphik.graal.api.core.Atom * Atoms} into a {@link List} of {@link PositiveLiteral VLog4J * PositiveLiterals}. * - * @param literals list of {@link fr.lirmm.graphik.graal.api.core.Atom Graal - * Atoms}. + * @param atoms list of {@link fr.lirmm.graphik.graal.api.core.Atom Graal + * Atoms}. * @return A {@link List} of {@link PositiveLiteral VLog4J PositiveLiterals}. */ public static List convertAtoms(final List atoms) { @@ -109,8 +109,8 @@ public static List convertAtoms(final List convertAtomsToFacts(final List atoms) { @@ -254,7 +254,7 @@ public static List convertRules(final List"a". Graal Constant with identifier * "c" will be transformed to vlog4j Constant with name * "<c>". - * + * * @throws GraalConvertException If the term is neither variable nor constant. 
*/ private static Term convertTerm(final fr.lirmm.graphik.graal.api.core.Term term, From 36b37fe21467e2c3d603b1d85ff4bbc5eea3296b Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 14:08:41 +0100 Subject: [PATCH 0538/1003] Parser: Fix Javadoc Parser: Fix javadoc Parser: Fix style --- .../parser/DataSourceDeclarationHandler.java | 10 ++-- .../parser/DatatypeConstantHandler.java | 5 +- .../vlog4j/parser/DirectiveArgument.java | 14 ++--- .../vlog4j/parser/DirectiveHandler.java | 6 +-- .../vlog4j/parser/ParserConfiguration.java | 25 +++++---- .../ImportFileRelativeDirectiveHandler.java | 2 - .../parser/javacc/JavaCCParserBase.java | 54 ++++++++++++------- .../parser/javacc/SubParserFactory.java | 11 ++-- .../vlog4j/syntax/parser/EntityTest.java | 5 +- .../RuleParserConfigurableLiteralTest.java | 17 +++--- .../parser/RuleParserDataSourceTest.java | 11 ++-- .../vlog4j/syntax/parser/RuleParserTest.java | 9 +--- 12 files changed, 88 insertions(+), 81 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index 4ec871f68..bbc6a359c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * VLog4j Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 VLog4j Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,8 +20,6 @@ * #L% */ - -import org.semanticweb.vlog4j.parser.DirectiveHandler; import org.semanticweb.vlog4j.core.model.api.DataSource; /** diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java index c584b876a..eec3b4fcd 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java @@ -34,8 +34,9 @@ public interface DatatypeConstantHandler { * * @param lexicalForm lexical representation of the constant. * - * @throws ParsingException when the given representation is invalid for this datatype. - + * @throws ParsingException when the given representation is invalid for this + * datatype. + * * @return */ public DatatypeConstant createConstant(String lexicalForm) throws ParsingException; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java index d7fc50b95..f9e71f5e6 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java @@ -39,9 +39,9 @@ private DirectiveArgument() { /** * Apply a function to the contained value. 
* - * @argument stringHandler the function to apply to a string argument - * @argument iriHandler the function to apply to an IRI - * @argument termHandler the function to apply to a Term + * @param stringHandler the function to apply to a string argument + * @param iriHandler the function to apply to an IRI + * @param termHandler the function to apply to a Term * * @return the value returned by the appropriate handler function */ @@ -51,7 +51,7 @@ public abstract V apply(Function stringHandler, /** * Partially compare two arguments, without comparing the actual values. * - * @argument other the Object to compare to. + * @param other the Object to compare to. * * @return An {@link Optional} containing true if the arguments are surely * equal, containing false if the arguments are not equal, or an empty @@ -77,7 +77,7 @@ protected Optional isEqual(Object other) { /** * Create an argument containing a String. * - * @argument value the string value + * @param value the string value * * @return An argument containing the given string value */ @@ -111,7 +111,7 @@ public int hashCode() { /** * Create an argument containing a IRI. * - * @argument value the IRI value + * @param value the IRI value * * @return An argument containing the given IRI value */ @@ -145,7 +145,7 @@ public int hashCode() { /** * Create an argument containing a Term. * - * @argument value the Term value + * @param value the Term value * * @return An argument containing the given Term value */ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index 2a8c4a070..678bafca4 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -160,7 +160,7 @@ public static Term validateTermArgument(final DirectiveArgument argument, final /** * Obtain a {@link KnowledgeBase} from a {@link SubParserFactory}. * - * @argument subParserFactory the SubParserFactory. + * @param subParserFactory the SubParserFactory. * * @return the knowledge base. */ @@ -173,7 +173,7 @@ default KnowledgeBase getKnowledgeBase(SubParserFactory subParserFactory) { /** * Obtain a {@link ParserConfiguration} from a {@link SubParserFactory}. * - * @argument subParserFactory the SubParserFactory. + * @param subParserFactory the SubParserFactory. * * @return the parser configuration. */ @@ -186,7 +186,7 @@ default ParserConfiguration getParserConfiguration(SubParserFactory subParserFac /** * Obtain {@link PrefixDeclarations} from a {@link SubParserFactory}. * - * @argument subParserFactory the SubParserFactory. + * @param subParserFactory the SubParserFactory. * * @return the prefix declarations. */ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 5f59f75da..8b766bf66 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -20,8 +20,8 @@ * #L% */ -import java.util.HashMap; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import org.apache.commons.lang3.Validate; @@ -127,8 +127,7 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final Strin * Parse a constant with optional data type. 
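	 * Handling of specific datatypes can be customised by registering a
	 * {@link DatatypeConstantHandler} via {@code registerDatatype}.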
* * @param lexicalForm the (unescaped) lexical form of the constant. - * @param languageTag the language tag, or null if not present. - * @param the datatype, or null if not present. + * @param datatype the datatype, or null if not present. * * @throws ParsingException when the lexical form is invalid for the given data * type. @@ -203,8 +202,8 @@ public ParserConfiguration registerDatatype(final String name, final DatatypeCon /** * Register a custom literal handler. * - * @argument delimiter the delimiter to handle. - * @argument handler the handler for this literal type. + * @param delimiter the delimiter to handle. + * @param handler the handler for this literal type. * * @throws IllegalArgumentException when the literal delimiter has already been * registered. @@ -223,8 +222,8 @@ public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimite /** * Register a directive. * - * @argument name the name of the directive. - * @argument handler the handler for this directive. + * @param name the name of the directive. + * @param handler the handler for this directive. * * @throws IllegalArgumentException when the directive name has already been * registered, or is a reserved name (i.e., one @@ -246,16 +245,16 @@ public ParserConfiguration registerDirective(String name, DirectiveHandler arguments, SubParserFactory subParserFactory) - throws ParsingException { + public KnowledgeBase parseDirectiveStatement(String name, List arguments, + SubParserFactory subParserFactory) throws ParsingException { final DirectiveHandler handler = this.directives.get(name); if (handler == null) { @@ -267,9 +266,9 @@ public KnowledgeBase parseDirectiveStatement(String name, List= s.length() - 1) - throw new ParseException("Illegal escape at end of string, line:" + line + ", column: " + column); + throw new ParseException("Illegal escape at end of string, line: " + line + ", column: " + column); char ch2 = s.charAt(i + 1); column = column + 1; i = i + 1; @@ -222,32 +226,46 @@ static String unescape(String s, char escape, boolean pointCodeOnly, int line, i // Not just codepoints. Must be a legal escape. char ch3 = 0; switch (ch2) { - case 'n': + case 'n': + ch3 = '\n'; - break; + break; + case 't': + ch3 = '\t'; - break; + break; + case 'r': + ch3 = '\r'; - break; + break; + case 'b': + ch3 = '\b'; - break; + break; + case 'f': + ch3 = '\f'; - break; + break; + case '\'': - ch3 = '\''; - break; + ch3 = '\''; + break; + case '\"': - ch3 = '\"'; - break; + ch3 = '\"'; + break; + case '\\': - ch3 = '\\'; - break; + ch3 = '\\'; + break; + default: - throw new ParseException("Unknown escape: \\" + ch2 + ", line:" + line + ", column: " + column); + + throw new ParseException("Unknown escape: \\" + ch2 + ", line: " + line + ", column: " + column); } sb.append(ch3); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index c92c4be83..75019ef77 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -30,10 +30,10 @@ import org.semanticweb.vlog4j.parser.RuleParser; /** - * Factory for creating a SubParser sharing configuration, (semantic) - * state, and prefixes, but with an independent input stream, to be - * used, e.g., for parsing arguments in data source declarations. The - * parser will start in the {@code DEFAULT} lexical state. 
+ * Factory for creating a SubParser sharing configuration, (semantic) state, and + * prefixes, but with an independent input stream, to be used, e.g., for parsing + * arguments in data source declarations. The parser will start in the + * {@code DEFAULT} lexical state. * * @author Maximilian Marx */ @@ -45,8 +45,7 @@ public class SubParserFactory { /** * Construct a SubParserFactory. * - * @param parser the parser instance to get the (semantic) state - * from. + * @param parser the parser instance to get the (semantic) state from. */ SubParserFactory(final JavaCCParser parser) { this.knowledgeBase = parser.getKnowledgeBase(); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 3ca6b90d2..c1b13d3b0 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -19,7 +19,8 @@ * limitations under the License. * #L% */ -import static org.junit.Assert.assertEquals; + +import static org.junit.Assert.*; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; @@ -36,8 +37,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; public class EntityTest { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java index 09770733d..12066069c 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -248,7 +248,7 @@ public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws Parsing Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( - Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset], [tst]"))); + Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset], [tst]"))); assertEquals(expected, constants); } @@ -256,11 +256,12 @@ public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws Parsing public void parseLiteral_mixedAndNestedLiterals_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) - .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) - .registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler) - .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler) - .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); - Literal result = RuleParser.parseLiteral("p(|{}|, #test#, [|test, #test#, test|], ([], {}, [{[{}]}]))", parserConfiguration); + .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) + .registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler) + 
.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); + Literal result = RuleParser.parseLiteral("p(|{}|, #test#, [|test, #test#, test|], ([], {}, [{[{}]}]))", + parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( Arrays.asList(pipeConstant, hashConstant, bracketConstant, parenConstant)); @@ -272,8 +273,8 @@ static Constant makeReversedConstant(String name) { return Expressions.makeAbstractConstant(builder.reverse().toString()); } - static ConfigurableLiteralHandler reversingHandler = - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm); + static ConfigurableLiteralHandler reversingHandler = (String syntacticForm, + SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm); static ConfigurableLiteralHandler getMockLiteralHandler(ConfigurableLiteralDelimiter delimiter, Constant constant) { ConfigurableLiteralHandler handler = mock(ConfigurableLiteralHandler.class); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 4d045d292..8554ce8e1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -19,11 +19,9 @@ * limitations under the License. * #L% */ -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; import java.io.File; import java.io.IOException; @@ -150,7 +148,8 @@ public void testCustomDataSource() throws ParsingException { DirectiveArgument.string("world")); RuleParser.parseDataSourceDeclaration(input, parserConfiguration); - verify(handler).handleDirective(eq(expectedArguments), ArgumentMatchers.any()); + verify(handler).handleDirective(ArgumentMatchers.eq(expectedArguments), + ArgumentMatchers.any()); } @Test diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 63ab37258..7f44b75a5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -19,9 +19,8 @@ * limitations under the License. 
* #L% */ -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; import java.util.ArrayList; import java.util.Arrays; @@ -42,10 +41,6 @@ import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; -import org.semanticweb.vlog4j.parser.ParserConfiguration; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; public class RuleParserTest { From 4f1d069a7ce95b267ae6045d75a1bca28432e186 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 14:19:22 +0100 Subject: [PATCH 0539/1003] Fix source version for javadoc --- pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/pom.xml b/pom.xml index c521ee208..ac95c88c5 100644 --- a/pom.xml +++ b/pom.xml @@ -305,6 +305,7 @@ maven-javadoc-plugin ${maven.javadoc.version} + 1.8 VLog4j homepage]]> From 822f77b61fbf20b912e18f2eda9ba98c842f9350 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 14:36:08 +0100 Subject: [PATCH 0540/1003] Core: Add KnowledgeBase tests --- .../core/reasoner/KnowledgeBaseTest.java | 24 +++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java index 61a274f23..d58b64d9c 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -29,8 +29,10 @@ import org.junit.Before; import org.junit.Test; import org.mockito.internal.util.collections.Sets; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; public class KnowledgeBaseTest { @@ -93,4 +95,22 @@ public void testDoRemoveStatementInexistentPredicate() { } + @Test + public void getPrefixDeclarations_default_hasEmptyBase() { + assertEquals(this.kb.getPrefixDeclarations().getBase(), ""); + } + + @Test(expected = PrefixDeclarationException.class) + public void getPrefixDeclarations_defaultUndeclaredPrefix_throws() throws PrefixDeclarationException { + this.kb.getPrefixDeclarations().getPrefix("ex"); + } + + @Test + public void mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationException { + String iri = "https://example.org"; + MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(); + prefixDeclarations.setPrefix("ex", iri); + this.kb.mergePrefixDeclarations(prefixDeclarations); + assertEquals(this.kb.getPrefixDeclarations().getPrefix("ex"), iri); + } } From 207e84674dc36e0aa625a62a2c008d91d71f2b95 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 14:36:30 +0100 Subject: [PATCH 0541/1003] Parser: Rename tests to match code package name --- .../org/semanticweb/vlog4j/{syntax => }/parser/EntityTest.java | 2 +- .../vlog4j/{syntax => }/parser/ParserConfigurationTest.java | 2 +- .../{syntax => }/parser/RuleParserConfigurableLiteralTest.java | 2 +- .../vlog4j/{syntax => }/parser/RuleParserDataSourceTest.java | 2 +- .../vlog4j/{syntax => }/parser/RuleParserParseFactTest.java | 2 +- .../semanticweb/vlog4j/{syntax => }/parser/RuleParserTest.java | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/EntityTest.java (99%) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/ParserConfigurationTest.java (98%) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/RuleParserConfigurableLiteralTest.java (99%) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/RuleParserDataSourceTest.java (99%) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/RuleParserParseFactTest.java (98%) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/RuleParserTest.java (99%) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/EntityTest.java similarity index 99% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/EntityTest.java index c1b13d3b0..64520e4ed 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/EntityTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java similarity index 98% rename from 
vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java index f89c5f012..e6f83e8f1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java similarity index 99% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java index 12066069c..c69bceae6 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java similarity index 99% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java index 8554ce8e1..c42975d90 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java similarity index 98% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java index 0d0bd03be..f065eed5e 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java similarity index 99% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index 7f44b75a5..eae805c6a 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package 
org.semanticweb.vlog4j.parser; /*- * #%L From 7b9a6d9f5bf917744d735c22d8ac9b7d0c6b5224 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 17:10:50 +0100 Subject: [PATCH 0542/1003] Parser: Improve test coverage --- .../PrefixDeclarationException.java | 12 ++- .../vlog4j/parser/DirectiveArgument.java | 14 +-- .../vlog4j/parser/DirectiveHandler.java | 4 +- .../parser/LocalPrefixDeclarations.java | 10 +- ...eryResultDataSourceDeclarationHandler.java | 13 ++- .../vlog4j/parser/javacc/JavaCCParser.jj | 10 +- .../parser/javacc/JavaCCParserBase.java | 14 +-- .../vlog4j/parser/DirectiveHandlerTest.java | 96 +++++++++++++++++ .../parser/ParserConfigurationTest.java | 80 ++++++++++++-- .../vlog4j/parser/RuleParserTest.java | 7 ++ .../parser/javacc/JavaCCParserBaseTest.java | 102 ++++++++++++++++++ 11 files changed, 325 insertions(+), 37 deletions(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBaseTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java index f3806c21d..7c209c5cf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,12 +21,16 @@ */ public class PrefixDeclarationException extends VLog4jException { - /** - * + /** + * */ private static final long serialVersionUID = 1L; public PrefixDeclarationException(String errorMessage) { super(errorMessage); } + + public PrefixDeclarationException(String errorMessage, Throwable cause) { + super(errorMessage, cause); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java index f9e71f5e6..51190723e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java @@ -20,7 +20,7 @@ * #L% */ -import java.net.URL; +import java.net.URI; import java.util.Optional; import java.util.function.Function; @@ -46,7 +46,7 @@ private DirectiveArgument() { * @return the value returned by the appropriate handler function */ public abstract V apply(Function stringHandler, - Function iriHandler, Function termHandler); + Function iriHandler, Function termHandler); /** * Partially compare two arguments, without comparing the actual values. 
@@ -85,7 +85,7 @@ public static DirectiveArgument string(String value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler) { return stringHandler.apply(value); } @@ -115,11 +115,11 @@ public int hashCode() { * * @return An argument containing the given IRI value */ - public static DirectiveArgument iri(URL value) { + public static DirectiveArgument iri(URI value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler) { return iriHandler.apply(value); } @@ -153,7 +153,7 @@ public static DirectiveArgument term(Term value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler) { return termHandler.apply(value); } @@ -192,7 +192,7 @@ public Optional fromString() { * @return An optional containing the contained IRI, or an empty Optional if the * argument doesn't contain a IRI. */ - public Optional fromIri() { + public Optional fromIri() { return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty()); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index 678bafca4..c97cf0591 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -22,7 +22,7 @@ import java.io.File; import java.io.IOException; -import java.net.URL; +import java.net.URI; import java.util.List; import java.util.NoSuchElementException; @@ -128,7 +128,7 @@ public static File validateFilenameArgument(final DirectiveArgument argument, fi * * @return the contained IRI. 
*/ - public static URL validateIriArgument(final DirectiveArgument argument, final String description) + public static URI validateIriArgument(final DirectiveArgument argument, final String description) throws ParsingException { try { return argument.fromIri().get(); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java index 4f325de6e..6c234806e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java @@ -21,6 +21,7 @@ */ import java.net.URI; +import java.net.URISyntaxException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; @@ -85,7 +86,14 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE } public String absolutize(String iri) throws PrefixDeclarationException { - URI relative = URI.create(iri); + URI relative; + + try { + relative = new URI(iri); + } catch (URISyntaxException e) { + throw new PrefixDeclarationException("Failed to parse IRI", e); + } + if (relative.isAbsolute()) { return iri; } else { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index ff178435c..036036fb7 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.parser.datasources; +import java.net.MalformedURLException; + /*- * #%L * VLog4j Parser @@ -20,6 +22,7 @@ * #L% */ +import java.net.URI; import java.net.URL; import java.util.List; @@ -41,10 +44,16 @@ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSource public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 3); - URL endpoint = DirectiveHandler.validateIriArgument(arguments.get(0), "SPARQL endpoint"); + URI endpoint = DirectiveHandler.validateIriArgument(arguments.get(0), "SPARQL endpoint"); String variables = DirectiveHandler.validateStringArgument(arguments.get(1), "variables list"); String query = DirectiveHandler.validateStringArgument(arguments.get(2), "query fragment"); - return new SparqlQueryResultDataSource(endpoint, variables, query); + URL endpointURL; + try { + endpointURL = endpoint.toURL(); + } catch (MalformedURLException e) { + throw new ParsingException("URI \"" + endpoint + "\" is not a valid URL", e); + } + return new SparqlQueryResultDataSource(endpointURL, variables, query); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 7024f63c3..5cbd83f81 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -17,8 +17,8 @@ package org.semanticweb.vlog4j.parser.javacc; import java.io.File; import java.io.InputStream; import java.io.IOException; -import java.net.URL; 
-import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; import java.util.List; import java.util.Deque; @@ -411,10 +411,10 @@ LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : } { ( LOOKAHEAD(String()) str = String() { argument = DirectiveArgument.string(str); } | LOOKAHEAD(absoluteIri()) str = absoluteIri() { - URL url; + URI url; try { - url = new URL(str); - } catch (MalformedURLException e) { + url = new URI(str); + } catch (URISyntaxException e) { throw makeParseExceptionWithCause("Error parsing IRIhandler: " + e.getMessage(), e); } argument = DirectiveArgument.iri(url); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 43b42f58c..ed2f353ba 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -151,8 +151,8 @@ NamedNull createNamedNull(String lexicalForm) { /** * Creates a suitable {@link Constant} from the parsed data. * - * @param string the string data (unescaped) - * @param datatype the datatype, or null if not provided + * @param string the string data (unescaped) + * @param datatype the datatype, or null if not provided * @return suitable constant */ Constant createConstant(String lexicalForm, String datatype) throws ParseException { @@ -181,10 +181,10 @@ void addDataSource(String predicateName, int arity, DataSource dataSource) throw } static String unescapeStr(String s, int line, int column) throws ParseException { - return unescape(s, '\\', false, line, column); + return unescape(s, '\\', line, column); } - static String unescape(String s, char escape, boolean pointCodeOnly, int line, int column) throws ParseException { + static String unescape(String s, char escape, int line, int column) throws ParseException { int i = s.indexOf(escape); if (i == -1) @@ -264,7 +264,6 @@ static String unescape(String s, char escape, boolean pointCodeOnly, int line, i break; default: - throw new ParseException("Unknown escape: \\" + ch2 + ", line: " + line + ", column: " + column); } sb.append(ch3); @@ -273,7 +272,7 @@ static String unescape(String s, char escape, boolean pointCodeOnly, int line, i } /** - * Remove the first and last {@code n} characters from string {@code s} + * Remove the first and last {@code n} characters from string {@code s} * * @param s string to strip delimiters from * @param n number of characters to strip from both ends @@ -352,7 +351,8 @@ Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syn return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); } - KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) throws ParseException { + KnowledgeBase parseDirectiveStatement(String name, List arguments, + SubParserFactory subParserFactory) throws ParseException { try { return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); } catch (ParsingException e) { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java new file mode 100644 index 000000000..eab233671 --- /dev/null +++ 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java @@ -0,0 +1,96 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.net.URI; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; + +public class DirectiveHandlerTest { + private static final String STRING = "src/test/resources/facts.rls"; + private static final URI IRI = URI.create("https://example.org"); + private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); + + private static final DirectiveArgument STRING_ARGUMENT = DirectiveArgument.string(STRING); + private static final DirectiveArgument IRI_ARGUMENT = DirectiveArgument.iri(IRI); + private static final DirectiveArgument TERM_ARGUMENT = DirectiveArgument.term(TERM); + + @Test + public void validateStringArgument_stringArgument_succeeds() throws ParsingException { + assertEquals(DirectiveHandler.validateStringArgument(STRING_ARGUMENT, "string argument"), STRING); + } + + @Test(expected = ParsingException.class) + public void validateStringArgument_iriArgument_throws() throws ParsingException { + DirectiveHandler.validateStringArgument(IRI_ARGUMENT, "string argument"); + } + + @Test(expected = ParsingException.class) + public void validateStringArgument_termArgument_throws() throws ParsingException { + DirectiveHandler.validateStringArgument(TERM_ARGUMENT, "string argument"); + } + + @Test + public void validateIriArgument_iriArgument_succeeds() throws ParsingException { + assertEquals(DirectiveHandler.validateIriArgument(IRI_ARGUMENT, "iri argument"), IRI); + } + + @Test(expected = ParsingException.class) + public void validateIriArgument_StringArgument_throws() throws ParsingException { + DirectiveHandler.validateIriArgument(STRING_ARGUMENT, "iri argument"); + } + + @Test(expected = ParsingException.class) + public void validateIriArgument_termArgument_throws() throws ParsingException { + DirectiveHandler.validateIriArgument(TERM_ARGUMENT, "iri argument"); + } + + @Test + public void validateTermArgument_termArgument_succeeds() throws ParsingException { + assertEquals(DirectiveHandler.validateTermArgument(TERM_ARGUMENT, "term argument"), TERM); + } + + @Test(expected = ParsingException.class) + public void validateTermArgument_stringArgument_throws() throws ParsingException { + DirectiveHandler.validateTermArgument(STRING_ARGUMENT, "term argument"); + } + + @Test(expected = ParsingException.class) + public void validateTermArgument_iriArgument_throws() throws ParsingException { + DirectiveHandler.validateTermArgument(IRI_ARGUMENT, "term argument"); + } + + @Test + public void validateFilenameArgument_filename_succeeds() throws ParsingException { + assertEquals(DirectiveHandler.validateFilenameArgument(STRING_ARGUMENT, 
"filename argument").getPath(), STRING); + } + + @Test + public void validateFilenameArgument_invalidFilename_throws() throws ParsingException { + DirectiveHandler.validateFilenameArgument(DirectiveArgument.string(STRING + "-nonexistant"), + "filename argument"); + } + +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java index e6f83e8f1..e78a42ca5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java @@ -20,47 +20,109 @@ * #L% */ -import static org.mockito.Mockito.*; +import static org.junit.Assert.*; +import java.util.ArrayList; + +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; import org.semanticweb.vlog4j.parser.ParserConfiguration; -import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; public class ParserConfigurationTest { private static final String TYPE_NAME = "test-type"; private static final String SOURCE_NAME = "test-source"; + private static final String DIRECTIVE_NAME = "test-directive"; + + private ParserConfiguration parserConfiguration; - private final DatatypeConstantHandler datatypeConstantHandler = mock(DatatypeConstantHandler.class); - private final DataSourceDeclarationHandler dataSourceDeclarationHandler = mock(DataSourceDeclarationHandler.class); + @Mock + private DatatypeConstantHandler datatypeConstantHandler; + @Mock + private DataSourceDeclarationHandler dataSourceDeclarationHandler; + @Mock + private SubParserFactory subParserFactory; + @Mock + private DirectiveHandler directiveHandler; + + @Before + public void init() { + parserConfiguration = new ParserConfiguration(); + } @Test(expected = IllegalArgumentException.class) public void registerDataSource_duplicateName_throws() { - ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler) .registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler); } @Test(expected = IllegalArgumentException.class) public void registerDatatype_duplicateName_throws() { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDatatype(TYPE_NAME, datatypeConstantHandler).registerDatatype(TYPE_NAME, datatypeConstantHandler); } @Test public void registerDataSource_datatypeName_succeeds() { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDatatype(TYPE_NAME, datatypeConstantHandler).registerDataSource(TYPE_NAME, dataSourceDeclarationHandler); } @Test public void registerDatatype_dataSourceName_succeeds() { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler).registerDatatype(SOURCE_NAME, datatypeConstantHandler); } + + @Test + public void isParsingOfNamedNullsAllowed_default_returnsFalse() { + assertFalse("named nulls are disallowed by default", parserConfiguration.isParsingOfNamedNullsAllowed()); + } + + @Test + 
public void isParsingOfNamedNullsAllowed_enabled_returnsTrue() { + parserConfiguration.allowNamedNulls(); + assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + } + + @Test + public void isParsingOfNamedNullsAllowed_enabledAndDisabled_returnsFalse() { + parserConfiguration.allowNamedNulls(); + assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + parserConfiguration.disallowNamedNulls(); + assertFalse("named nulls are disallowed after disallowing them", + parserConfiguration.isParsingOfNamedNullsAllowed()); + } + + @Test(expected = ParsingException.class) + public void parseConfigurableLiteral_unregisteredLiteral_throws() throws ParsingException { + parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, "test", subParserFactory); + } + + @Test(expected = IllegalArgumentException.class) + public void registerDirective_reservedName_throws() throws IllegalArgumentException { + parserConfiguration.registerDirective("base", directiveHandler); + } + + @Test + public void registerDirective_unreserverdName_succeeds() throws IllegalArgumentException { + parserConfiguration.registerDirective(DIRECTIVE_NAME, directiveHandler); + } + + @Test(expected = IllegalArgumentException.class) + public void registerDirective_duplicateName_throws() throws IllegalArgumentException { + parserConfiguration.registerDirective(DIRECTIVE_NAME, directiveHandler); + parserConfiguration.registerDirective(DIRECTIVE_NAME, directiveHandler); + } + + @Test(expected = ParsingException.class) + public void parseDirectiveStatement_unregisteredDirective_throws() throws ParsingException { + parserConfiguration.parseDirectiveStatement(DIRECTIVE_NAME, new ArrayList<>(), subParserFactory); + } + } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index eae805c6a..78bb44d6d 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -461,4 +461,11 @@ public void parseInto_duplicateImportStatements_throws() throws ParsingException KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); } + + @Test(expected = ParsingException.class) + public void parseInto_duplicateRelativeImportStatements_throws() throws ParsingException { + String input = "@import \"src/test/resources/facts.rls\" . @import-relative \"src/test/resources/facts.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + RuleParser.parseInto(knowledgeBase, input); + } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBaseTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBaseTest.java new file mode 100644 index 000000000..045cf373a --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBaseTest.java @@ -0,0 +1,102 @@ +package org.semanticweb.vlog4j.parser.javacc; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; +import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; +import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; + +public class JavaCCParserBaseTest { + private JavaCCParserBase parserBase; + private static final String DATATYPE_NAME = "https://example.org/test-type"; + + private DatatypeConstantHandler datatypeConstantHandler = mock(DatatypeConstantHandler.class); + + @Before + public void init() { + parserBase = new JavaCCParserBase(); + } + + @Rule + public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void createConstant_undeclaredPrefix_throws() throws ParseException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Failed to parse IRI"); + parserBase.createConstant("ïnvälid://test"); + } + + @Test + public void createConstant_throwingDatatypeConstantHandler_throws() throws ParseException, ParsingException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Failed to parse Constant"); + + when(datatypeConstantHandler.createConstant(anyString())).thenThrow(ParsingException.class); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration().registerDatatype(DATATYPE_NAME, + datatypeConstantHandler); + parserBase.setParserConfiguration(parserConfiguration); + parserBase.createConstant("test", DATATYPE_NAME); + } + + @Test + public void unescapeStr_escapeChars_succeeds() throws ParseException { + String input = "\\\\test\r\ntest: \\n\\t\\r\\b\\f\\'\\\"\\\\"; + String expected = "\\test\r\ntest: \n\t\r\b\f\'\"\\"; + String result = JavaCCParserBase.unescapeStr(input, 0, 0); + assertEquals(result, expected); + } + + @Test + public void unescapeStr_illegalEscapeAtEndOfString_throws() throws ParseException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Illegal escape at end of string"); + + JavaCCParserBase.unescapeStr("\\", 0, 0); + } + + @Test + public void unescapeStr_unknownEscapeSequence_throws() throws ParseException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Unknown escape"); + + JavaCCParserBase.unescapeStr("\\y", 0, 0); + } + + @Test + public void setBase_changingBase_throws() throws PrefixDeclarationException { + exceptionRule.expect(PrefixDeclarationException.class); + exceptionRule.expectMessage("Base is already defined as"); + + parserBase.setBase("https://example.org/"); + parserBase.setBase("https://example.com/"); + } +} From e6d02dc7b6b0d9ce800cab0df71b86afe7882dd7 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 17:44:07 +0100 Subject: [PATCH 0543/1003] Core: Add some tests for MergeablePrefixDeclarations --- .../MergeablePrefixDeclarations.java | 5 +- 
.../MergeablePrefixDeclarationsTest.java | 104 ++++++++++++++++++ 2 files changed, 106 insertions(+), 3 deletions(-) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java index f586fa9ff..050cea6c8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -26,7 +26,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; @@ -126,7 +125,7 @@ public String unresolveAbsoluteIri(String iri) { } @Override - public String absolutize(String iri) throws PrefixDeclarationException { + public String absolutize(String iri) { URI relative = URI.create(iri); if (relative.isAbsolute()) { @@ -167,7 +166,7 @@ public MergeablePrefixDeclarations mergePrefixDeclarations(final PrefixDeclarati private String getFreshPrefix() { for (long idx = nextIndex; true; ++idx) { - String freshPrefix = GENERATED_PREFIX_PREFIX + idx; + String freshPrefix = GENERATED_PREFIX_PREFIX + idx + ":"; if (!prefixes.containsKey(freshPrefix)) { this.nextIndex = idx + 1; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java new file mode 100644 index 000000000..ba6fd72a7 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -0,0 +1,104 @@ +package org.semanticweb.vlog4j.core.model; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import org.junit.Before; +import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; + +public class MergeablePrefixDeclarationsTest { + private MergeablePrefixDeclarations prefixDeclarations; + + private static final String BASE = "https://example.org/"; + private static final String MORE_SPECIFIC = BASE + "example/"; + private static final String RELATIVE = "relative/test"; + + + @Before + public void init() { + prefixDeclarations = new MergeablePrefixDeclarations(); + } + + @Test + public void setBase_changingBase_succeeds() { + prefixDeclarations.setBase(BASE); + assertEquals(prefixDeclarations.getBase(), BASE); + prefixDeclarations.setBase(MORE_SPECIFIC); + assertEquals(prefixDeclarations.getBase(), MORE_SPECIFIC); + } + + @Test + public void setBase_redeclareSameBase_succeeds() { + prefixDeclarations.setBase(BASE); + assertEquals(prefixDeclarations.getBase(), BASE); + prefixDeclarations.setBase(BASE); + assertEquals(prefixDeclarations.getBase(), BASE); + } + + @Test + public void absolutize_noBase_identical() { + assertEquals(prefixDeclarations.absolutize(RELATIVE), RELATIVE); + } + + @Test + public void absolutize_base_absoluteIri() { + prefixDeclarations.setBase(BASE); + assertEquals(prefixDeclarations.absolutize(RELATIVE), BASE + RELATIVE); + } + + @Test + public void absolutize_absoluteIri_identical() { + assertEquals(prefixDeclarations.absolutize(BASE), BASE); + } + + @Test(expected = PrefixDeclarationException.class) + public void resolvePrefixedName_undeclaredPrefix_throws() throws PrefixDeclarationException { + prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE); + } + + @Test + public void resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefix("eg:", BASE); + assertEquals(prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE), BASE + RELATIVE); + } + + @Test + public void setPrefix_redeclarePrefix_succeeds() { + prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); + } + + @Test + public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDeclarationException { + String prefix = "vlog4j_generated_"; + prefixDeclarations.setPrefix(prefix + "0:", BASE + "generated/"); + prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); + + assertEquals(prefixDeclarations.getPrefix(prefix + "1:"), MORE_SPECIFIC); + } + + +} From cf3a0c61bd6f4c95aeeccad2e369fb45ba3a32d8 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 16:24:01 +0100 Subject: [PATCH 0544/1003] Core: Don't expose PrefixDeclarations in KnowledgeBase --- .../vlog4j/core/reasoner/KnowledgeBase.java | 55 ++++++++++++++++++- .../core/reasoner/KnowledgeBaseTest.java | 26 +++++---- 2 files changed, 68 insertions(+), 13 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 06915b4b1..088ffda91 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -17,6 +17,7 @@ import java.util.function.BiFunction; import org.apache.commons.lang3.Validate; +import 
org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -472,6 +473,11 @@ public KnowledgeBase importRulesFile(File file, BiFunction Date: Wed, 12 Feb 2020 17:03:18 +0100 Subject: [PATCH 0545/1003] Core: Add more tests for MergeablePrefixDeclarations --- .../MergeablePrefixDeclarations.java | 3 +- .../MergeablePrefixDeclarationsTest.java | 60 +++++++++++++++++++ 2 files changed, 62 insertions(+), 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java index 050cea6c8..7e8404060 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -105,7 +105,8 @@ public String unresolveAbsoluteIri(String iri) { Map matches = new HashMap<>(); prefixes.forEach((prefixName, baseIri) -> { - if (iri.startsWith(baseIri)) { + // only select proper prefixes here, since `eg:` is not a valid prefixed name. + if (iri.startsWith(baseIri) && !iri.equals(baseIri)) { matches.put(iri.replaceFirst(baseIri, prefixName), baseIri.length()); } }); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java index ba6fd72a7..7070655c2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -23,16 +23,21 @@ import static org.junit.Assert.*; import static org.mockito.Mockito.*; +import java.util.Arrays; + import org.junit.Before; import org.junit.Test; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; public class MergeablePrefixDeclarationsTest { private MergeablePrefixDeclarations prefixDeclarations; private static final String BASE = "https://example.org/"; + private static final String UNRELATED = "https://example.com/"; private static final String MORE_SPECIFIC = BASE + "example/"; + private static final String EVEN_MORE_SPECIFIC = MORE_SPECIFIC + "relative/"; private static final String RELATIVE = "relative/test"; @@ -100,5 +105,60 @@ public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDe assertEquals(prefixDeclarations.getPrefix(prefix + "1:"), MORE_SPECIFIC); } + @Test + public void mergeablePrefixDeclarations_constructor_succeeds() throws PrefixDeclarationException { + this.prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); + MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(this.prefixDeclarations); + assertEquals(prefixDeclarations.getPrefix("eg:"), MORE_SPECIFIC); + } + + @Test(expected = RuntimeException.class) + public void mergePrefixDeclarations_getPrefixUnexpectedlyThrows_throws() throws PrefixDeclarationException { + PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); + + 
when(prefixDeclarations.iterator()).thenReturn(Arrays.asList("eg:", "ex:").iterator()); + when(prefixDeclarations.getPrefix(anyString())).thenThrow(PrefixDeclarationException.class); + + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + } + + @Test + public void unresolveAbsoluteIri_default_identical() { + assertEquals(prefixDeclarations.unresolveAbsoluteIri(BASE), BASE); + } + + @Test + public void unresolveAbsoluteIri_declaredPrefix_succeeds() { + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), MORE_SPECIFIC); + prefixDeclarations.setPrefix("eg:", BASE); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), "eg:example/"); + } + + @Test + public void unresolveAbsoluteIri_unrelatedPrefix_identical() { + prefixDeclarations.setPrefix("eg:", UNRELATED); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), MORE_SPECIFIC); + } + + @Test + public void unresolveAbsoluteIri_unrelatedAndRelatedPrefixes_succeeds() { + prefixDeclarations.setPrefix("ex:", UNRELATED); + prefixDeclarations.setPrefix("eg:", BASE); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), "eg:example/"); + } + + @Test + public void unresolveAbsoluteIri_multipleMatchingPrefixes_longestMatchWins() { + prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefix("ex:", MORE_SPECIFIC); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE), "ex:" + RELATIVE); + prefixDeclarations.setPrefix("er:", EVEN_MORE_SPECIFIC); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE), "er:test"); + } + @Test + public void unresolveAbsoluteIri_exactPrefixMatch_identical() { + prefixDeclarations.setPrefix("eg:", BASE); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(BASE), BASE); + } } From 0fb67863cf84d76993d7077f7c574776a64003e4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 17:45:38 +0100 Subject: [PATCH 0546/1003] Parser: Move URL validation into DirectiveHandler --- .../vlog4j/parser/DirectiveHandler.java | 23 +++++++++++++++++++ ...eryResultDataSourceDeclarationHandler.java | 17 ++++---------- .../vlog4j/parser/DirectiveHandlerTest.java | 12 +++++++++- 3 files changed, 38 insertions(+), 14 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index c97cf0591..b4c9cb4b3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -22,7 +22,9 @@ import java.io.File; import java.io.IOException; +import java.net.MalformedURLException; import java.net.URI; +import java.net.URL; import java.util.List; import java.util.NoSuchElementException; @@ -137,6 +139,27 @@ public static URI validateIriArgument(final DirectiveArgument argument, final St } } + /** + * Validate that the provided argument is a {@link URL}. + * + * @param argument the argument to validate + * @param description a description of the argument, used in constructing the + * error message. + * + * @throws ParsingException when the given argument is not a valid {@link URL}. + * + * @return the {@link URL} corresponding to the contained IRI. 
+ */ + public static URL validateUrlArgument(final DirectiveArgument argument, final String description) + throws ParsingException { + URI iri = DirectiveHandler.validateIriArgument(argument, description); + try { + return iri.toURL(); + } catch (MalformedURLException e) { + throw new ParsingException(description + "\"" + argument + "\" is not a valid URL.", e); + } + } + /** * Validate that the provided argument is a {@link Term}. * diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 036036fb7..aaf51047b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.parser.datasources; -import java.net.MalformedURLException; - /*- * #%L * VLog4j Parser @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,7 +20,6 @@ * #L% */ -import java.net.URI; import java.net.URL; import java.util.List; @@ -44,16 +41,10 @@ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSource public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 3); - URI endpoint = DirectiveHandler.validateIriArgument(arguments.get(0), "SPARQL endpoint"); + URL endpoint = DirectiveHandler.validateUrlArgument(arguments.get(0), "SPARQL endpoint"); String variables = DirectiveHandler.validateStringArgument(arguments.get(1), "variables list"); String query = DirectiveHandler.validateStringArgument(arguments.get(2), "query fragment"); - URL endpointURL; - try { - endpointURL = endpoint.toURL(); - } catch (MalformedURLException e) { - throw new ParsingException("URI \"" + endpoint + "\" is not a valid URL", e); - } - return new SparqlQueryResultDataSource(endpointURL, variables, query); + return new SparqlQueryResultDataSource(endpoint, variables, query); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java index eab233671..958a9ac76 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java @@ -19,9 +19,9 @@ * limitations under the License. 
* #L% */ - import static org.junit.Assert.*; +import java.net.MalformedURLException; import java.net.URI; import org.junit.Test; @@ -93,4 +93,14 @@ public void validateFilenameArgument_invalidFilename_throws() throws ParsingExce "filename argument"); } + @Test + public void validateUrlArgument_url_succeeds() throws ParsingException, MalformedURLException { + assertEquals(DirectiveHandler.validateUrlArgument(IRI_ARGUMENT, "urls argument"), IRI.toURL()); + } + + @Test(expected = ParsingException.class) + public void validateUrlArgument_invalidUrl_throws() throws ParsingException { + DirectiveHandler.validateUrlArgument(DirectiveArgument.iri(URI.create("example://test")), "url argument"); + } + } From 6ede31846a8557b362f6c5efdebee5855f5ee1d4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 17:54:19 +0100 Subject: [PATCH 0547/1003] Parser: Add license header on JavaCCParser --- pom.xml | 7 +++++++ .../vlog4j/parser/javacc/JavaCCParser.jj | 20 +++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/pom.xml b/pom.xml index ac95c88c5..ded2f85ff 100644 --- a/pom.xml +++ b/pom.xml @@ -135,6 +135,13 @@ src/main/java src/test/java + true + + true + + java + diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 5cbd83f81..8dcbb3baf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -1,3 +1,23 @@ +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2020 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + options { // Use \ u escapes in streams AND use a reader for the query From e55bcfc4652551a9a62b9a4ee1e25cb04aeed7e6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 19:37:37 +0100 Subject: [PATCH 0548/1003] Fix style --- .../MergeablePrefixDeclarations.java | 2 +- .../MergeablePrefixDeclarationsTest.java | 1 - .../parser/javacc/JavaCCParserBase.java | 54 +++++++------------ 3 files changed, 21 insertions(+), 36 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java index 7e8404060..fbdf49b59 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -105,7 +105,7 @@ public String unresolveAbsoluteIri(String iri) { Map matches = new HashMap<>(); prefixes.forEach((prefixName, baseIri) -> { - // only select proper prefixes here, since `eg:` is not a valid prefixed name. + // only select proper prefixes here, since `eg:` is not a valid prefixed name. 
if (iri.startsWith(baseIri) && !iri.equals(baseIri)) { matches.put(iri.replaceFirst(baseIri, prefixName), baseIri.length()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java index 7070655c2..77ca77437 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -40,7 +40,6 @@ public class MergeablePrefixDeclarationsTest { private static final String EVEN_MORE_SPECIFIC = MORE_SPECIFIC + "relative/"; private static final String RELATIVE = "relative/test"; - @Before public void init() { prefixDeclarations = new MergeablePrefixDeclarations(); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index ed2f353ba..497fa15e5 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -187,8 +187,9 @@ static String unescapeStr(String s, int line, int column) throws ParseException static String unescape(String s, char escape, int line, int column) throws ParseException { int i = s.indexOf(escape); - if (i == -1) + if (i == -1) { return s; + } // Dump the initial part straight into the string buffer StringBuilder sb = new StringBuilder(s.substring(0, i)); @@ -197,17 +198,14 @@ static String unescape(String s, char escape, int line, int column) throws Parse char ch = s.charAt(i); // Keep line and column numbers. switch (ch) { - case '\n': - case '\r': - line++; - + case '\n': + case '\r': + line++; column = 1; - break; + break; default: - column++; - break; } @@ -217,8 +215,9 @@ static String unescape(String s, char escape, int line, int column) throws Parse } // Escape - if (i >= s.length() - 1) + if (i >= s.length() - 1) { throw new ParseException("Illegal escape at end of string, line: " + line + ", column: " + column); + } char ch2 = s.charAt(i + 1); column = column + 1; i = i + 1; @@ -226,43 +225,30 @@ static String unescape(String s, char escape, int line, int column) throws Parse // Not just codepoints. Must be a legal escape. 
char ch3 = 0; switch (ch2) { - case 'n': - + case 'n': ch3 = '\n'; - break; - + break; case 't': - ch3 = '\t'; - break; - + break; case 'r': - ch3 = '\r'; - break; - + break; case 'b': - ch3 = '\b'; - break; - + break; case 'f': - ch3 = '\f'; - break; - + break; case '\'': - ch3 = '\''; - break; - + ch3 = '\''; + break; case '\"': - ch3 = '\"'; - break; - + ch3 = '\"'; + break; case '\\': - ch3 = '\\'; - break; - + ch3 = '\\'; + break; default: throw new ParseException("Unknown escape: \\" + ch2 + ", line: " + line + ", column: " + column); } From 532b666e59f004c1670e2e0468adef11aef261a6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 19:49:05 +0100 Subject: [PATCH 0549/1003] Core: Add round-trip tests for MergeablePrefixDeclarations --- .../model/MergeablePrefixDeclarationsTest.java | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java index 77ca77437..c89ad654f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -88,6 +88,14 @@ public void resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationE assertEquals(prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE), BASE + RELATIVE); } + @Test + public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws PrefixDeclarationException { + prefixDeclarations.setPrefix("eg:", BASE); + String resolved = prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE); + String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved); + assertEquals(prefixDeclarations.resolvePrefixedName(unresolved), resolved); + } + @Test public void setPrefix_redeclarePrefix_succeeds() { prefixDeclarations.setPrefix("eg:", BASE); @@ -160,4 +168,12 @@ public void unresolveAbsoluteIri_exactPrefixMatch_identical() { prefixDeclarations.setPrefix("eg:", BASE); assertEquals(prefixDeclarations.unresolveAbsoluteIri(BASE), BASE); } + + @Test + public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws PrefixDeclarationException { + prefixDeclarations.setPrefix("eg:", BASE); + String unresolved = prefixDeclarations.unresolveAbsoluteIri(BASE + RELATIVE); + String resolved = prefixDeclarations.resolvePrefixedName(unresolved); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(resolved), unresolved); + } } From fdb3b36372429d754e2e28bde5417cd2a9bee509 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 20:05:02 +0100 Subject: [PATCH 0550/1003] Parser: Simplify tests for configurable literals --- .../RuleParserConfigurableLiteralTest.java | 28 ++++++------------- 1 file changed, 8 insertions(+), 20 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java index c69bceae6..8a6e31fc8 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java @@ -28,6 +28,7 @@ import java.util.List; import java.util.stream.Collectors; +import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentMatchers; import 
org.semanticweb.vlog4j.core.model.api.Constant; @@ -58,6 +59,13 @@ public class RuleParserConfigurableLiteralTest { public static final ConfigurableLiteralHandler bracketHandler = getMockLiteralHandler( ConfigurableLiteralDelimiter.BRACKET, bracketConstant); + private ParserConfiguration parserConfiguration; + + @Before + public void init() { + parserConfiguration = new ParserConfiguration(); + } + @Test(expected = ParsingException.class) public void parseLiteral_unregisteredCustomLiteral_throws() throws ParsingException { RuleParser.parseLiteral("p(|test|)"); @@ -65,7 +73,6 @@ public void parseLiteral_unregisteredCustomLiteral_throws() throws ParsingExcept @Test public void registerLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); assertTrue("Configurable Literal Handler has been registered", parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE)); @@ -73,14 +80,12 @@ public void registerLiteral_succeeds() throws ParsingException { @Test(expected = IllegalArgumentException.class) public void registerLiteral_duplicateHandler_throws() throws ParsingException, IllegalArgumentException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.PIPE, hashHandler); } @Test public void parseLiteral_customPipeLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); Literal result = RuleParser.parseLiteral("p(|test|)", parserConfiguration); assertEquals(pipeConstant, result.getConstants().toArray()[0]); @@ -88,7 +93,6 @@ public void parseLiteral_customPipeLiteral_succeeds() throws ParsingException { @Test public void parseLiteral_customHashLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler); Literal result = RuleParser.parseLiteral("p(#test#)", parserConfiguration); assertEquals(hashConstant, result.getConstants().toArray()[0]); @@ -96,7 +100,6 @@ public void parseLiteral_customHashLiteral_succeeds() throws ParsingException { @Test public void parseLiteral_customParenLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler); Literal result = RuleParser.parseLiteral("p((test))", parserConfiguration); assertEquals(parenConstant, result.getConstants().toArray()[0]); @@ -104,7 +107,6 @@ public void parseLiteral_customParenLiteral_succeeds() throws ParsingException { @Test public void parseLiteral_customBraceLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); Literal result = RuleParser.parseLiteral("p({test})", parserConfiguration); assertEquals(braceConstant, result.getConstants().toArray()[0]); @@ -112,7 +114,6 @@ public void parseLiteral_customBraceLiteral_succeeds() throws ParsingException { @Test public void parseLiteral_customBracketLiteral_succeeds() throws ParsingException { - ParserConfiguration 
parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); assertEquals(bracketConstant, result.getConstants().toArray()[0]); @@ -120,7 +121,6 @@ public void parseLiteral_customBracketLiteral_succeeds() throws ParsingException @Test public void parseLiteral_mixedLiterals_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler) @@ -136,7 +136,6 @@ public void parseLiteral_mixedLiterals_succeeds() throws ParsingException { public void parseLiteral_nontrivialPipeLiteral_succeeds() throws ParsingException { String label = "this is a test, do not worry."; String input = "p(|" + label + "|)"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); @@ -146,7 +145,6 @@ public void parseLiteral_nontrivialPipeLiteral_succeeds() throws ParsingExceptio public void parseLiteral_nestedParenLiterals_succeeds() throws ParsingException { String label = "(((this is a test, do not worry.)))"; String input = "p((" + label + "))"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); @@ -155,7 +153,6 @@ public void parseLiteral_nestedParenLiterals_succeeds() throws ParsingException @Test public void parseLiteral_multipleParenLiterals_succeeds() throws ParsingException { String input = "p((test), (tset))"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -167,7 +164,6 @@ public void parseLiteral_multipleParenLiterals_succeeds() throws ParsingExceptio @Test public void parseLiteral_multipleNestedParenLiterals_succeeds() throws ParsingException { String input = "p(((test)), ((tset), (tst)))"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -179,7 +175,6 @@ public void parseLiteral_multipleNestedParenLiterals_succeeds() throws ParsingEx @Test(expected = ParsingException.class) public void parseLiteral_mismatchedNestedParenLiteral_throws() throws ParsingException { String input = "p((test ())"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); RuleParser.parseLiteral(input, parserConfiguration); } @@ -188,7 +183,6 @@ public void parseLiteral_mismatchedNestedParenLiteral_throws() 
throws ParsingExc public void parseLiteral_nestedBraceLiteral_succeeds() throws ParsingException { String label = "{{{this is a test, do not worry.}}}"; String input = "p({" + label + "})"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); @@ -197,7 +191,6 @@ public void parseLiteral_nestedBraceLiteral_succeeds() throws ParsingException { @Test public void parseLiteral_multipleBraceLiterals_succeeds() throws ParsingException { String input = "p({test}, {tset})"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -209,7 +202,6 @@ public void parseLiteral_multipleBraceLiterals_succeeds() throws ParsingExceptio @Test public void parseLiteral_multipleNestedBraceLiterals_succeeds() throws ParsingException { String input = "p({{test}}, {{tset}})"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -222,7 +214,6 @@ public void parseLiteral_multipleNestedBraceLiterals_succeeds() throws ParsingEx public void parseLiteral_nestedBracketLiteral_succeeds() throws ParsingException { String label = "[[[this is a test, do not worry.]]]"; String input = "p([" + label + "])"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); @@ -231,7 +222,6 @@ public void parseLiteral_nestedBracketLiteral_succeeds() throws ParsingException @Test public void parseLiteral_multipleBracketLiterals_succeeds() throws ParsingException { String input = "p([test], [tset])"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -243,7 +233,6 @@ public void parseLiteral_multipleBracketLiterals_succeeds() throws ParsingExcept @Test public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws ParsingException { String input = "p([[test]], [[tset], [tst]])"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -254,7 +243,6 @@ public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws Parsing @Test public void parseLiteral_mixedAndNestedLiterals_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); 
parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) .registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler) From ee05e918cab3646e129743c34f85edfdc67685af Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 20:15:06 +0100 Subject: [PATCH 0551/1003] Core: Fix tests for MergeablePrefixDeclarations --- .../core/model/MergeablePrefixDeclarationsTest.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java index c89ad654f..a83ff8f3e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -90,8 +90,9 @@ public void resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationE @Test public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws PrefixDeclarationException { - prefixDeclarations.setPrefix("eg:", BASE); - String resolved = prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE); + String prefix = "eg:"; + prefixDeclarations.setPrefix(prefix, BASE); + String resolved = BASE + RELATIVE; String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved); assertEquals(prefixDeclarations.resolvePrefixedName(unresolved), resolved); } @@ -171,8 +172,9 @@ public void unresolveAbsoluteIri_exactPrefixMatch_identical() { @Test public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws PrefixDeclarationException { - prefixDeclarations.setPrefix("eg:", BASE); - String unresolved = prefixDeclarations.unresolveAbsoluteIri(BASE + RELATIVE); + String prefix = "eg:"; + prefixDeclarations.setPrefix(prefix, BASE); + String unresolved = prefix + RELATIVE; String resolved = prefixDeclarations.resolvePrefixedName(unresolved); assertEquals(prefixDeclarations.unresolveAbsoluteIri(resolved), unresolved); } From 28c71c4bc10ab6d79e2125d43c4bea895c1a05f8 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 17 Feb 2020 15:32:13 +0100 Subject: [PATCH 0552/1003] Parser: Allow inherited @base to be overridden in @import-relative --- .../vlog4j/core/reasoner/KnowledgeBase.java | 22 +++++++++++++++---- .../parser/LocalPrefixDeclarations.java | 11 +++++++++- .../semanticweb/vlog4j/parser/RuleParser.java | 8 +------ .../ImportFileDirectiveHandler.java | 9 ++------ .../ImportFileRelativeDirectiveHandler.java | 9 ++------ .../vlog4j/parser/RuleParserTest.java | 10 ++++++++- vlog4j-parser/src/test/resources/base.rls | 4 ++++ 7 files changed, 46 insertions(+), 27 deletions(-) create mode 100644 vlog4j-parser/src/test/resources/base.rls diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 088ffda91..b8735c4fd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -14,7 +14,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.BiFunction; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; @@ -444,6 +443,21 @@ Map> 
getFactsByPredicate() { return this.factsByPredicate; } + /** + * Interface for a method that parses the contents of a stream into a + * KnowledgeBase. + * + * This is essentially + * {@link org.semanticweb.vlog4j.parser.RuleParser#parseInto}, but we need to + * avoid a circular dependency here -- this is also why we throw + * {@link Exception} instead of + * {@link org.semanticweb.vlog4j.parser.ParsingException}. + */ + @FunctionalInterface + public interface AdditionalInputParser { + KnowledgeBase parseInto(InputStream stream, KnowledgeBase kb) throws IOException, Exception; + } + /** * Import rules from a file. * @@ -458,15 +472,15 @@ Map> getFactsByPredicate() { * * @return this */ - public KnowledgeBase importRulesFile(File file, BiFunction parseFunction) - throws RuntimeException, IOException, IllegalArgumentException { + public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunction) + throws Exception, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); Validate.isTrue(isNewFile, "file \"" + file.getName() + "\" was already imported."); try (InputStream stream = new FileInputStream(file)) { - return parseFunction.apply(stream, this); + return parseFunction.parseInto(stream, this); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java index 6c234806e..8bc8a93b6 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java @@ -41,10 +41,19 @@ final public class LocalPrefixDeclarations implements PrefixDeclarations { Map prefixes = new HashMap<>(); String baseUri; + String fallbackUri; + + public LocalPrefixDeclarations() { + this(""); // empty string encodes: "no base" (use relative IRIs) + } + + public LocalPrefixDeclarations(String fallbackUri) { + this.fallbackUri = fallbackUri; + } public String getBase() { if (this.baseUri == null) { - this.baseUri = ""; // empty string encodes: "no base" (use relative IRIs) + this.baseUri = this.fallbackUri; } return baseUri.toString(); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 93e81cb9f..cf26de098 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -58,13 +58,7 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea final JavaCCParser parser = new JavaCCParser(stream, encoding); if (baseIri != null) { - PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); - - try { - prefixDeclarations.setBase(baseIri); - } catch (PrefixDeclarationException e) { - throw new ParsingException("Invalid base IRI \"" + baseIri + "\"", e); - } + PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(baseIri); parser.setPrefixDeclarations(prefixDeclarations); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java index 3be532ba7..af494d37c 100644 --- 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java @@ -21,7 +21,6 @@ */ import java.io.File; -import java.io.IOException; import java.io.InputStream; import java.util.List; @@ -50,14 +49,10 @@ public KnowledgeBase handleDirective(List arguments, final Su try { return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { - try { - RuleParser.parseInto(kb, stream, parserConfiguration); - } catch (ParsingException e) { - throw new RuntimeException(e); - } + RuleParser.parseInto(kb, stream, parserConfiguration); return kb; }); - } catch (RuntimeException | IOException e) { + } catch (Exception e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java index 6b9d18d4e..680c498e6 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -21,7 +21,6 @@ */ import java.io.File; -import java.io.IOException; import java.io.InputStream; import java.util.List; @@ -51,14 +50,10 @@ public KnowledgeBase handleDirective(List arguments, final Su try { return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { - try { - RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); - } catch (ParsingException e) { - throw new RuntimeException(e); - } + RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); return kb; }); - } catch (RuntimeException | IOException e) { + } catch (Exception e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index 78bb44d6d..a18f20aec 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -447,7 +447,7 @@ public void parse_importStatement_succeeds() throws ParsingException { } @Test - public void parse_relativeImportStatement_suceeds() throws ParsingException { + public void parse_relativeImportStatement_succeeds() throws ParsingException { String input = "@base . @import-relative \"src/test/resources/facts.rls\" ."; KnowledgeBase knowledgeBase = RuleParser.parse(input); List expected = Arrays.asList(fact1, fact3); @@ -468,4 +468,12 @@ public void parseInto_duplicateRelativeImportStatements_throws() throws ParsingE KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); } + + public void parseInto_relativeImportRedeclaringBase_succeeds() throws ParsingException { + String input = "@base . 
@import-relative \"src/test/resources/base.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact1, fact2); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } } diff --git a/vlog4j-parser/src/test/resources/base.rls b/vlog4j-parser/src/test/resources/base.rls new file mode 100644 index 000000000..998d6d6fb --- /dev/null +++ b/vlog4j-parser/src/test/resources/base.rls @@ -0,0 +1,4 @@ +@base . + +s(ex:c) . +p("abc") . From ab37e5ecf5677c58411d293717d37d1c6c8ee1e5 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 17 Feb 2020 15:33:09 +0100 Subject: [PATCH 0553/1003] Parser: Make sure that named nulls are not parsed inside rule bodies --- .../semanticweb/vlog4j/parser/javacc/JavaCCParser.jj | 12 +++++++++--- .../vlog4j/parser/javacc/JavaCCParserBase.java | 9 +++++---- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 8dcbb3baf..2043e9c7d 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -283,7 +283,13 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { s = absoluteIri() { return createConstant(s); } | t = < VARORPREDNAME > { return createConstant(t.image); } | LOOKAHEAD( < NAMED_NULL >, { isParsingOfNamedNullsAllowed() }) - t = < NAMED_NULL > { return createNamedNull(t.image); } + t = < NAMED_NULL > { + if (context == FormulaContext.BODY) { + throw new ParseException("Named nulls may not appear in the body of a rule."); + } + + return createNamedNull(t.image); + } | c = NumericLiteral() { return c; } | c = RDFLiteral() { return c; } | t = < UNIVAR > { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 497fa15e5..db34b00e4 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -144,10 +144,6 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { return Expressions.makeAbstractConstant(absoluteIri); } - NamedNull createNamedNull(String lexicalForm) { - return new NamedNullImpl(lexicalForm); - } - /** * Creates a suitable {@link Constant} from the parsed data. 
* @@ -163,6 +159,11 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } } + NamedNull createNamedNull(String lexicalForm) { + // @todo(mx): rename into uuid + return new NamedNullImpl(lexicalForm); + } + void addStatement(Statement statement) { knowledgeBase.addStatement(statement); } From 8745efb2cf5b35e9b622914042d844df9b8b016b Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 17 Feb 2020 17:48:08 +0100 Subject: [PATCH 0554/1003] Remove failOnMissingHeaders from license plugin configuration Since failOnMissingHeaders is only relevant in dryRun mode, this doesn't change the behaviour of the plugin, but removes warnings from the build output. --- pom.xml | 3 -- .../implementation/RenamedNamedNull.java | 41 +++++++++++++++++++ .../vlog4j/parser/ParserConfiguration.java | 2 +- .../parser/javacc/JavaCCParserBase.java | 20 +++++++-- .../parser/RuleParserParseFactTest.java | 6 +-- 5 files changed, 61 insertions(+), 11 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java diff --git a/pom.xml b/pom.xml index ded2f85ff..ed6f23606 100644 --- a/pom.xml +++ b/pom.xml @@ -136,9 +136,6 @@ src/test/java true - - true java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java new file mode 100644 index 000000000..af1861859 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java @@ -0,0 +1,41 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +import java.util.UUID; + +/* + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.NamedNull; +import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; + +/** + * A {@link NamedNull} term that has been renamed during parsing. + * + * @author Maximilian Marx + */ +public class RenamedNamedNull extends NamedNullImpl { + private RenamedNamedNull(String name) { + super(name); + } + + public RenamedNamedNull(UUID name) { + this(name.toString()); + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 8b766bf66..4015cb83b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -49,7 +49,7 @@ public class ParserConfiguration { /** * Whether parsing Named Nulls is allowed. */ - private boolean allowNamedNulls = false; + private boolean allowNamedNulls = true; /** * The registered data sources. 
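[Editorial note, not part of the patch: the JavaCCParserBase change in the next diff draws a random per-parser namespace and maps every named-null label to a UUID via UUID.nameUUIDFromBytes. A self-contained sketch of that scheme, with class and method names that are ours rather than the library's:]

```java
import java.nio.charset.StandardCharsets;
import java.util.UUID;

class NamedNullRenamer {
    // Fresh random namespace per parser instance.
    private final byte[] namespace = UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8);

    String rename(String label) {
        final byte[] labelBytes = label.getBytes(StandardCharsets.UTF_8);
        final byte[] combined = new byte[namespace.length + labelBytes.length];
        System.arraycopy(namespace, 0, combined, 0, namespace.length);
        System.arraycopy(labelBytes, 0, combined, namespace.length, labelBytes.length);
        // The same label yields the same UUID within one parse; a separate parse
        // (for example, an @import-ed file) gets a fresh namespace, so its named
        // nulls differ. This matches the RuleParserTest cases added later in this series.
        return UUID.nameUUIDFromBytes(combined).toString();
    }
}
```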
diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index db34b00e4..2a56108ef 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -1,5 +1,8 @@ package org.semanticweb.vlog4j.parser.javacc; +import java.io.ByteArrayOutputStream; +import java.io.IOException; + /*- * #%L * vlog4j-parser @@ -22,6 +25,7 @@ import java.util.HashSet; import java.util.List; +import java.util.UUID; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; @@ -34,7 +38,7 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.vlog4j.core.model.implementation.RenamedNamedNull; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.DirectiveArgument; @@ -62,6 +66,7 @@ public class JavaCCParserBase { private KnowledgeBase knowledgeBase; private ParserConfiguration parserConfiguration; + private byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); /** * "Local" variable to remember (universal) body variables during parsing. @@ -159,9 +164,16 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } } - NamedNull createNamedNull(String lexicalForm) { - // @todo(mx): rename into uuid - return new NamedNullImpl(lexicalForm); + NamedNull createNamedNull(String lexicalForm) throws ParseException { + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + try { + stream.write(namedNullNamespace); + stream.write(lexicalForm.getBytes()); + } catch (IOException e) { + throw makeParseExceptionWithCause("Failed to generate a unique name for named null", e); + } + + return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); } void addStatement(Statement statement) { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java index f065eed5e..f9b55cedd 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java @@ -45,12 +45,12 @@ public class RuleParserParseFactTest { @Test public void parseFact_string_succeeds() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\") ."), factA); + assertEquals(factA, RuleParser.parseFact("p(\"a\") .")); } @Test public void parseFact_twoStrings_succeeds() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\",\"b\") ."), factAB); + assertEquals(factAB, RuleParser.parseFact("p(\"a\",\"b\") .")); } @Test(expected = ParsingException.class) @@ -75,7 +75,7 @@ public void parseFact_namedNull_throws() throws ParsingException { public void parseFact_namedNullAllowed_succeeds() throws ParsingException { String input = "p(_:1) ."; ParserConfiguration parserConfiguration = new ParserConfiguration().allowNamedNulls(); - 
assertEquals(RuleParser.parseFact(input, parserConfiguration), fact1); + assertEquals(fact1, RuleParser.parseFact(input, parserConfiguration)); } @Test(expected = ParsingException.class) From 2ed313e9127343af0a824d0d52205c0ac9069261 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 17 Feb 2020 18:26:20 +0100 Subject: [PATCH 0555/1003] Parser: Rename named nulls during parsing --- .../parser/javacc/JavaCCParserBase.java | 8 +++ .../parser/javacc/SubParserFactory.java | 3 ++ .../parser/ParserConfigurationTest.java | 17 +++--- .../vlog4j/parser/ParserTestUtils.java | 52 +++++++++++++++++++ .../parser/RuleParserParseFactTest.java | 34 ++++++++---- .../vlog4j/parser/RuleParserTest.java | 42 +++++++++++++-- vlog4j-parser/src/test/resources/blank.rls | 1 + 7 files changed, 136 insertions(+), 21 deletions(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserTestUtils.java create mode 100644 vlog4j-parser/src/test/resources/blank.rls diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 2a56108ef..02e9e35f6 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -326,6 +326,14 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } + byte[] getNamedNullNamespace() { + return namedNullNamespace; + } + + void setNamedNullNamespace(byte[] namedNullNamespace) { + this.namedNullNamespace = namedNullNamespace; + } + public void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { this.prefixDeclarations = prefixDeclarations; } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index 75019ef77..1f1269898 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -41,6 +41,7 @@ public class SubParserFactory { private final KnowledgeBase knowledgeBase; private final ParserConfiguration parserConfiguration; private final PrefixDeclarations prefixDeclarations; + private final byte[] namedNullNamespace; /** * Construct a SubParserFactory. 
@@ -51,6 +52,7 @@ public class SubParserFactory { this.knowledgeBase = parser.getKnowledgeBase(); this.prefixDeclarations = parser.getPrefixDeclarations(); this.parserConfiguration = parser.getParserConfiguration(); + this.namedNullNamespace = parser.getNamedNullNamespace(); } /** @@ -67,6 +69,7 @@ public JavaCCParser makeSubParser(final InputStream inputStream, final String en subParser.setKnowledgeBase(this.knowledgeBase); subParser.setPrefixDeclarations(this.prefixDeclarations); subParser.setParserConfiguration(this.parserConfiguration); + subParser.setNamedNullNamespace(this.namedNullNamespace); return subParser; } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java index e78a42ca5..236094222 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java @@ -80,23 +80,24 @@ public void registerDatatype_dataSourceName_succeeds() { } @Test - public void isParsingOfNamedNullsAllowed_default_returnsFalse() { - assertFalse("named nulls are disallowed by default", parserConfiguration.isParsingOfNamedNullsAllowed()); + public void isParsingOfNamedNullsAllowed_default_returnsTrue() { + assertTrue("named nulls are allowed by default", parserConfiguration.isParsingOfNamedNullsAllowed()); } @Test - public void isParsingOfNamedNullsAllowed_enabled_returnsTrue() { - parserConfiguration.allowNamedNulls(); - assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + public void isParsingOfNamedNullsAllowed_disabled_returnsFalse() { + parserConfiguration.disallowNamedNulls(); + assertFalse("named nulls are disallowed after disallowing them", + parserConfiguration.isParsingOfNamedNullsAllowed()); } @Test - public void isParsingOfNamedNullsAllowed_enabledAndDisabled_returnsFalse() { - parserConfiguration.allowNamedNulls(); - assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + public void isParsingOfNamedNullsAllowed_disabledAndEnabled_returnsTrue() { parserConfiguration.disallowNamedNulls(); assertFalse("named nulls are disallowed after disallowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + parserConfiguration.allowNamedNulls(); + assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); } @Test(expected = ParsingException.class) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserTestUtils.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserTestUtils.java new file mode 100644 index 000000000..93877b1e9 --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserTestUtils.java @@ -0,0 +1,52 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * VLog4j Syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +import static org.junit.Assert.assertTrue; + +import java.util.List; +import java.util.UUID; + +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.vlog4j.core.model.implementation.RenamedNamedNull; + +public interface ParserTestUtils { + public default void assertUuid(String uuidLike) { + try { + UUID.fromString(uuidLike); + } catch (IllegalArgumentException e) { + throw new AssertionError("expected a valid UUID, but got \"" + uuidLike + "\"", e); + } + } + + public default void assertArgumentIsNamedNull(Literal literal, int argument) { + List arguments = literal.getArguments(); + assertTrue("argument is positive", argument >= 1); + assertTrue("argument is a valid position", argument <= arguments.size()); + Term term = arguments.get(argument - 1); + assertTrue("argument is a named null", term instanceof NamedNullImpl); + + if (term instanceof RenamedNamedNull) { + assertUuid(term.getName()); + } + } +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java index f9b55cedd..e2a56b1ce 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java @@ -25,23 +25,23 @@ import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; -public class RuleParserParseFactTest { +public class RuleParserParseFactTest implements ParserTestUtils { private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarations.XSD_STRING); private final Constant b = Expressions.makeDatatypeConstant("b", PrefixDeclarations.XSD_STRING); - private final NamedNull null1 = new NamedNullImpl("1"); private final Fact factA = Expressions.makeFact("p", a); private final Fact factAB = Expressions.makeFact("p", a, b); - private final Fact fact1 = Expressions.makeFact("p", null1); @Test public void parseFact_string_succeeds() throws ParsingException { @@ -66,22 +66,36 @@ public void parseFact_arityZeroFact_throws() throws ParsingException { } @Test(expected = ParsingException.class) - public void parseFact_namedNull_throws() throws ParsingException { + public void parseFact_namedNullDisallowed_throws() throws ParsingException { String input = "p(_:1) ."; - RuleParser.parseFact(input); + ParserConfiguration parserConfiguration = new ParserConfiguration().disallowNamedNulls(); + RuleParser.parseFact(input, parserConfiguration); } @Test - public void parseFact_namedNullAllowed_succeeds() throws ParsingException { + public void parseFact_namedNull_succeeds() throws ParsingException { String input = "p(_:1) 
."; - ParserConfiguration parserConfiguration = new ParserConfiguration().allowNamedNulls(); - assertEquals(fact1, RuleParser.parseFact(input, parserConfiguration)); + Fact result = RuleParser.parseFact(input); + assertArgumentIsNamedNull(result, 1); } @Test(expected = ParsingException.class) public void parseFact_namedNullAsPredicateName_throws() throws ParsingException { String input = "_:p(\"a\") ."; - ParserConfiguration parserConfiguration = new ParserConfiguration().allowNamedNulls(); - RuleParser.parseFact(input, parserConfiguration); + RuleParser.parseFact(input); + } + + @Test(expected = ParsingException.class) + public void parseRule_namedNullInBody_throws() throws ParsingException { + String input = "q(_:head) :- p(_:body) ."; + RuleParser.parseRule(input); + } + + @Test + public void parseRule_namedNullInHead_succeeds() throws ParsingException { + String input = "q(_:head) :- p(\"a\") ."; + Rule result = RuleParser.parseRule(input); + Literal literal = result.getHead().getLiterals().get(0); + assertArgumentIsNamedNull(literal, 1); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index a18f20aec..8c2f274e2 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -42,7 +42,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -public class RuleParserTest { +public class RuleParserTest implements ParserTestUtils { private final Variable x = Expressions.makeUniversalVariable("X"); private final Variable y = Expressions.makeExistentialVariable("Y"); @@ -373,10 +373,14 @@ public void testBlankPrefixDeclaration() throws ParsingException { RuleParser.parse(input); } - @Test(expected = ParsingException.class) + @Test public void testBlankNodeTerm() throws ParsingException { String input = "(_:blank) ."; - RuleParser.parse(input); + KnowledgeBase result = RuleParser.parse(input); + List facts = result.getFacts(); + + assertEquals(1, facts.size()); + assertArgumentIsNamedNull(facts.get(0), 1); } @Test(expected = ParsingException.class) @@ -455,6 +459,38 @@ public void parse_relativeImportStatement_succeeds() throws ParsingException { assertEquals(expected, result); } + @Test + public void parse_import_renamesNamedNulls() throws ParsingException { + String input = "p(_:blank) . @import \"src/test/resources/blank.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List facts = knowledgeBase.getFacts(); + assertEquals(2, facts.size()); + Fact fact1 = facts.get(0); + Fact fact2 = facts.get(1); + + assertNotEquals(fact1, fact2); + assertArgumentIsNamedNull(fact1, 1); + assertArgumentIsNamedNull(fact2, 1); + } + + @Test + public void parse_reusedNamedNulls_identical() throws ParsingException { + String input = "p(_:blank) . q(_:blank) . 
p(_:other) ."; + + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List facts = knowledgeBase.getFacts(); + assertEquals(3, facts.size()); + Fact fact1 = facts.get(0); + Fact fact2 = facts.get(1); + Fact fact3 = facts.get(2); + + assertEquals(fact1.getArguments().get(0), fact2.getArguments().get(0)); + assertNotEquals(fact1.getArguments().get(0), fact3.getArguments().get(0)); + assertArgumentIsNamedNull(fact1, 1); + assertArgumentIsNamedNull(fact2, 1); + assertArgumentIsNamedNull(fact3, 1); + } + @Test(expected = ParsingException.class) public void parseInto_duplicateImportStatements_throws() throws ParsingException { String input = "@import \"src/test/resources/facts.rls\" . "; diff --git a/vlog4j-parser/src/test/resources/blank.rls b/vlog4j-parser/src/test/resources/blank.rls new file mode 100644 index 000000000..cc44c5389 --- /dev/null +++ b/vlog4j-parser/src/test/resources/blank.rls @@ -0,0 +1 @@ +p(_:blank) . From 4271c7df5ba7da89b956c2ee5e299c1b680fbbcc Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 17 Feb 2020 20:36:01 +0100 Subject: [PATCH 0556/1003] Parser: Add more tests --- .../semanticweb/vlog4j/parser/RuleParser.java | 13 +-- .../vlog4j/parser/DirectiveArgumentTest.java | 84 +++++++++++++++++++ .../parser/RuleParserDataSourceTest.java | 40 ++++++++- .../vlog4j/parser/RuleParserTest.java | 29 ++++++- vlog4j-parser/src/test/resources/base.rls | 2 +- 5 files changed, 158 insertions(+), 10 deletions(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveArgumentTest.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index cf26de098..e7784822d 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -53,6 +53,9 @@ public class RuleParser { private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); + private RuleParser() { + } + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException { final JavaCCParser parser = new JavaCCParser(stream, encoding); @@ -85,13 +88,13 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING, parserConfiguration, baseIri); + parseInto(knowledgeBase, inputStream, parserConfiguration, baseIri); } public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING, parserConfiguration); + parseInto(knowledgeBase, inputStream, parserConfiguration); } public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding) @@ -107,7 +110,7 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea public static void parseInto(final KnowledgeBase knowledgeBase, final String input) throws 
ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING); + parseInto(knowledgeBase, inputStream); } public static KnowledgeBase parse(final InputStream stream, final String encoding, @@ -125,7 +128,7 @@ public static KnowledgeBase parse(final InputStream stream, final ParserConfigur public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - return parse(inputStream, DEFAULT_STRING_ENCODING, parserConfiguration); + return parse(inputStream, parserConfiguration); } public static KnowledgeBase parse(final InputStream stream, final String encoding) throws ParsingException { @@ -138,7 +141,7 @@ public static KnowledgeBase parse(final InputStream stream) throws ParsingExcept public static KnowledgeBase parse(final String input) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - return parse(inputStream, DEFAULT_STRING_ENCODING); + return parse(inputStream); } /** diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveArgumentTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveArgumentTest.java new file mode 100644 index 000000000..ccb84bc4f --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveArgumentTest.java @@ -0,0 +1,84 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +import static org.junit.Assert.*; + +import java.net.URI; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; + +public class DirectiveArgumentTest { + private static final String STRING = "src/test/resources/facts.rls"; + private static final URI IRI = URI.create("https://example.org"); + private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); + + private static final DirectiveArgument STRING_ARGUMENT = DirectiveArgument.string(STRING); + private static final DirectiveArgument IRI_ARGUMENT = DirectiveArgument.iri(IRI); + private static final DirectiveArgument TERM_ARGUMENT = DirectiveArgument.term(TERM); + + @Test + public void equals_null_returnsFalse() { + assertFalse(STRING_ARGUMENT.equals(null)); + assertFalse(IRI_ARGUMENT.equals(null)); + assertFalse(TERM_ARGUMENT.equals(null)); + } + + @Test + public void equals_self_returnsTrue() { + assertTrue(STRING_ARGUMENT.equals(STRING_ARGUMENT)); + assertTrue(IRI_ARGUMENT.equals(IRI_ARGUMENT)); + assertTrue(TERM_ARGUMENT.equals(TERM_ARGUMENT)); + } + + @Test + public void equals_equal_returnsTrue() { + assertTrue(STRING_ARGUMENT.equals(DirectiveArgument.string(STRING))); + assertTrue(IRI_ARGUMENT.equals(DirectiveArgument.iri(IRI))); + assertTrue(TERM_ARGUMENT.equals(DirectiveArgument.term(TERM))); + } + + @Test + public void equals_notEqualButSameType_returnsFalse() { + assertFalse(STRING_ARGUMENT.equals(DirectiveArgument.string(STRING + "test"))); + assertFalse(IRI_ARGUMENT.equals(DirectiveArgument.iri(URI.create("https://example.com")))); + assertFalse(TERM_ARGUMENT + .equals(DirectiveArgument.term(Expressions.makeDatatypeConstant(STRING, "https://example.com")))); + } + + @Test + public void equals_differentType_returnsFalse() { + assertFalse(STRING_ARGUMENT.equals(IRI_ARGUMENT)); + assertFalse(STRING_ARGUMENT.equals(TERM_ARGUMENT)); + assertFalse(IRI_ARGUMENT.equals(STRING_ARGUMENT)); + assertFalse(IRI_ARGUMENT.equals(TERM_ARGUMENT)); + assertFalse(TERM_ARGUMENT.equals(STRING_ARGUMENT)); + assertFalse(TERM_ARGUMENT.equals(IRI_ARGUMENT)); + } + + @Test + public void equals_String_returnsFalse() { + assertFalse(STRING_ARGUMENT.equals((Object) "test")); + assertFalse(IRI_ARGUMENT.equals((Object) "test")); + assertFalse(TERM_ARGUMENT.equals((Object) "test")); + } +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java index c42975d90..84b0f2d65 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java @@ -32,6 +32,7 @@ import org.junit.Test; import org.mockito.ArgumentMatchers; +import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; @@ -146,7 +147,7 @@ public void testCustomDataSource() throws ParsingException { String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; List expectedArguments = Arrays.asList(DirectiveArgument.string("hello"), DirectiveArgument.string("world")); - RuleParser.parseDataSourceDeclaration(input, parserConfiguration); + RuleParser.parse(input, parserConfiguration); 
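[Editorial sketch, not part of the patch: for contrast with the deliberately misbehaving DuplicatingDataSourceDeclarationHandler defined a little further below, a minimal well-behaved custom data-source handler could look like this. The class name, the source name "fixed-csv" and the CSV path are ours; argument inspection is elided because DirectiveArgument's accessor API is not shown in this series, and the vlog4j and java.io/java.util imports are omitted.]

```java
class FixedCsvSourceHandler implements DataSourceDeclarationHandler {
    @Override
    public DataSource handleDirective(List<DirectiveArgument> arguments, SubParserFactory subParserFactory)
            throws ParsingException {
        // A well-behaved handler only builds and returns a DataSource; it does not
        // sneak extra statements into the knowledge base. Argument validation elided.
        try {
            return new CsvFileDataSource(new File("src/test/resources/example.csv"));
        } catch (IOException e) {
            throw new ParsingException(e);
        }
    }
}

// Registration and use, mirroring the surrounding tests:
ParserConfiguration exampleConfiguration = new DefaultParserConfiguration();
exampleConfiguration.registerDataSource("fixed-csv", new FixedCsvSourceHandler());
KnowledgeBase knowledgeBase = RuleParser.parse("@source p[1] : fixed-csv(\"ignored\") .", exampleConfiguration);
```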
verify(handler).handleDirective(ArgumentMatchers.eq(expectedArguments), ArgumentMatchers.any()); @@ -194,4 +195,41 @@ public void sparqlDataSourceDeclaration_invalidNumberOfArguments_throws() throws public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws ParsingException, IOException { RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); } + + class DuplicatingDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + public DataSource handleDirective(List arguments, SubParserFactory subParserFactory) + throws ParsingException { + CsvFileDataSource source; + try { + source = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); + } catch (IOException e) { + throw new ParsingException(e); + } + + KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); + ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); + RuleParser.parseInto(knowledgeBase, "@source q[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") .", + parserConfiguration); + + return source; + } + } + + @Test + public void parseInto_mockDataSourceWithBase_succeeds() throws ParsingException { + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + parserConfiguration.registerDataSource("mock-source", new DuplicatingDataSourceDeclarationHandler()); + String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; + KnowledgeBase knowledgeBase = new KnowledgeBase(); + RuleParser.parseInto(knowledgeBase, input, parserConfiguration, "https://example.org"); + assertEquals(2, knowledgeBase.getStatements().size()); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_unexpectedlyAddsTwoDatasources_throws() throws ParsingException { + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + parserConfiguration.registerDataSource("mock-source", new DuplicatingDataSourceDeclarationHandler()); + String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; + RuleParser.parseDataSourceDeclaration(input, parserConfiguration); + } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index 8c2f274e2..25243d0cb 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -37,10 +37,12 @@ import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.FormulaContext; public class RuleParserTest implements ParserTestUtils { @@ -374,7 +376,7 @@ public void testBlankPrefixDeclaration() throws ParsingException { } @Test - public void testBlankNodeTerm() throws ParsingException { + public void parse_NamedNullInFact_succeeds() throws ParsingException { String input = "(_:blank) ."; KnowledgeBase result = RuleParser.parse(input); List facts = result.getFacts(); @@ -383,8 +385,28 @@ public void testBlankNodeTerm() throws 
ParsingException { assertArgumentIsNamedNull(facts.get(0), 1); } + @Test + public void parseTerm_NamedNull_succeeds() throws ParsingException { + String input = "_:blank"; + Term result = RuleParser.parseTerm(input); + assertUuid(result.getName()); + } + + @Test + public void parseTerm_NamedNullInHead_succeeds() throws ParsingException { + String input = "_:blank"; + Term result = RuleParser.parseTerm(input, FormulaContext.HEAD); + assertUuid(result.getName()); + } + @Test(expected = ParsingException.class) - public void testBlankPredicateName() throws ParsingException { + public void parseTerm_NamedNullInBodyContext_throws() throws ParsingException { + String input = "_:blank"; + RuleParser.parseTerm(input, FormulaContext.BODY); + } + + @Test(expected = ParsingException.class) + public void testBParsingExceptione() throws ParsingException { String input = "_:(a) ."; RuleParser.parse(input); } @@ -505,10 +527,11 @@ public void parseInto_duplicateRelativeImportStatements_throws() throws ParsingE RuleParser.parseInto(knowledgeBase, input); } + @Test public void parseInto_relativeImportRedeclaringBase_succeeds() throws ParsingException { String input = "@base . @import-relative \"src/test/resources/base.rls\" ."; KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact2); + List expected = Arrays.asList(fact1, fact3); List result = knowledgeBase.getFacts(); assertEquals(expected, result); } diff --git a/vlog4j-parser/src/test/resources/base.rls b/vlog4j-parser/src/test/resources/base.rls index 998d6d6fb..69e925147 100644 --- a/vlog4j-parser/src/test/resources/base.rls +++ b/vlog4j-parser/src/test/resources/base.rls @@ -1,4 +1,4 @@ @base . -s(ex:c) . +s(c) . p("abc") . From 0fbdacdd288c673fcf810e2b0ce539a1dfc2cbb3 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 18 Feb 2020 10:33:55 +0100 Subject: [PATCH 0557/1003] Core: Fix spelling of VLogReasoner#getKnowledgeBasePredicates --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b48b19f50..690eb2019 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -462,7 +462,7 @@ public Correctness writeInferences(OutputStream stream) throws IOException { throw new ReasonerStateException(this.reasonerState, "Obtaining inferences is not alowed before reasoner is loaded!"); } - final Set toBeQueriedHeadPredicates = getKnolwedgeBasePredicates(); + final Set toBeQueriedHeadPredicates = getKnowledgeBasePredicates(); for (final Predicate predicate : toBeQueriedHeadPredicates) { final PositiveLiteral queryAtom = getQueryAtom(predicate); @@ -619,7 +619,7 @@ public void onStatementsRemoved(List statementsRemoved) { updateCorrectnessOnStatementsRemoved(); } - Set getKnolwedgeBasePredicates() { + Set getKnowledgeBasePredicates() { final Set toBeQueriedHeadPredicates = new HashSet<>(); for (final Rule rule : this.knowledgeBase.getRules()) { for (final Literal literal : rule.getHead()) { From 7c9b6ab3796f1a15ccf8b679373869553e4a6cf2 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 18 Feb 2020 15:28:29 +0100 Subject: [PATCH 0558/1003] Core: Abbreviate IRIs in exported facts using 
known prefixes --- .../core/model/api/AbstractConstant.java | 16 ++- .../core/model/api/DatatypeConstant.java | 23 +-- .../core/model/api/ExistentialVariable.java | 14 +- .../model/api/LanguageStringConstant.java | 23 +-- .../vlog4j/core/model/api/NamedNull.java | 14 +- .../vlog4j/core/model/api/Predicate.java | 113 ++++++++------- .../vlog4j/core/model/api/Term.java | 25 +++- .../core/model/api/UniversalVariable.java | 15 +- .../MergeablePrefixDeclarations.java | 12 +- .../core/model/implementation/Serializer.java | 110 +++++++++++++-- .../vlog4j/core/reasoner/KnowledgeBase.java | 9 ++ .../vlog4j/core/reasoner/Reasoner.java | 41 +++--- .../reasoner/implementation/VLogReasoner.java | 13 +- .../MergeablePrefixDeclarationsTest.java | 50 ++++--- .../VLogReasonerWriteInferencesTest.java | 133 ++++++++++++++++++ .../VlogReasonerWriteInferencesTest.java | 101 ------------- 16 files changed, 455 insertions(+), 257 deletions(-) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java delete mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java index 760b74358..41f7bd3ce 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /*- * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,11 +20,15 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for abstract constants, i.e. for constants that represent an * abstract domain element (in contrast to a specific value of a concrete * datatype). Such terms are of type {@link TermType#ABSTRACT_CONSTANT}. 
- * + * * @author Markus Kroetzsch */ public interface AbstractConstant extends Constant { @@ -41,4 +43,8 @@ default String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return Serializer.getString(this, iriTransformer); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java index 3702b7b52..d4dfe19e8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /*- * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,16 +20,20 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for datatype constants, i.e. for constants that represent a * specific value of a concrete datatype). Such terms are of type * {@link TermType#DATATYPE_CONSTANT}. - * + * * Note that datatype literal is a common name of the representation of * specific values for a datatype. We mostly avoid this meaning of * literal since a literal in logic is typically a negated or non-negated * atom. - * + * * @author Markus Kroetzsch */ public interface DatatypeConstant extends Constant { @@ -44,7 +46,7 @@ default TermType getType() { /** * Returns the datatype of this term, which is typically an IRI that defines how * to interpret the lexical value. - * + * * @return a non-blank String (not null, nor empty or whitespace). */ String getDatatype(); @@ -55,7 +57,7 @@ default TermType getType() { * strings that represent the same value, depending on the rules of the * datatype, and that there the value used here does not have to be a canonical * representation. 
- * + * * @return a non-null string */ String getLexicalValue(); @@ -64,4 +66,9 @@ default TermType getType() { default String getSyntacticRepresentation() { return Serializer.getString(this); } + + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return Serializer.getString(this, iriTransformer); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java index d573a7850..c62b7ce1b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /*- * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +20,10 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for existentially quantified variables, i.e., variables that appear * in the scope of an (implicit) existential quantifier in a rule. @@ -40,4 +42,8 @@ default String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return getSyntacticRepresentation(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java index 2c150b507..c38b0f901 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /*- * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,12 +20,16 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for string constants with a language tag, used to represent values * of type http://www.w3.org/1999/02/22-rdf-syntax-ns#langString in RDF, OWL, * and related languages used with knowledge graphs. Such terms are of type * {@link TermType#LANGSTRING_CONSTANT}. 
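For illustration, a language-tagged string constant as described above serialises in the usual `"string"@tag` form. A minimal sketch, assuming the `Expressions.makeLanguageStringConstant(string, languageTag)` factory, which is not part of this patch:

```
import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;

public class LangStringSketch {
	public static void main(String[] args) {
		// a value of type rdf:langString, expected to print something like "hello"@en
		final LanguageStringConstant greeting = Expressions.makeLanguageStringConstant("hello", "en");
		System.out.println(greeting.getSyntacticRepresentation());
	}
}
```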
- * + * * @author Markus Kroetzsch */ public interface LanguageStringConstant extends Constant { @@ -40,7 +42,7 @@ default TermType getType() { /** * Returns the datatype of this term, which is always * http://www.w3.org/1999/02/22-rdf-syntax-ns#langString. - * + * * @return a IRI of RDF langString datatype */ default String getDatatype() { @@ -49,7 +51,7 @@ default String getDatatype() { /** * Returns the string value of the literal without the language tag. - * + * * @return a non-null string */ String getString(); @@ -58,7 +60,7 @@ default String getDatatype() { * Returns the language tag of the literal, which should be a lowercase string * that conforms to the BCP 47 * specification. - * + * * @return a non-empty string */ String getLanguageTag(); @@ -67,4 +69,9 @@ default String getDatatype() { default String getSyntacticRepresentation() { return Serializer.getConstantName(this); } + + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return getSyntacticRepresentation(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java index 5413b9365..297692483 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /* * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +20,10 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for {@link TermType#NAMED_NULL} terms. A blank is an entity used to * represent anonymous domain elements introduced during the reasoning process @@ -41,4 +43,8 @@ default String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return getSyntacticRepresentation(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java index 7b604f289..c7b92761b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java @@ -1,57 +1,56 @@ -package org.semanticweb.vlog4j.core.model.api; - -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * A Predicate represents a relation between terms. Is uniquely identified by - * its name and arity. The arity determines the number of terms allowed in the - * relation. For example, a Predicate with name {@code P} and arity {@code n} - * allows atomic formulae of the form {@code P(t1,...,tn)}. - * - * @author Irina Dragoste - * - */ -public interface Predicate extends Entity { - - /** - * The name of the Predicate. - * - * @return the name of the Predicate. - */ - String getName(); - - /** - * The arity represents the number of terms allowed as relation arguments for - * this Predicate. For example, a Predicate with name {@code P} and arity - * {@code n} allows atomic formulae of the form {@code P(t1,...,tn)}. - * - * @return the arity of the Predicate. - */ - int getArity(); - - @Override - default String getSyntacticRepresentation() { - return Serializer.getString(this); - } - -} +package org.semanticweb.vlog4j.core.model.api; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + +/** + * A Predicate represents a relation between terms. Is uniquely identified by + * its name and arity. The arity determines the number of terms allowed in the + * relation. For example, a Predicate with name {@code P} and arity {@code n} + * allows atomic formulae of the form {@code P(t1,...,tn)}. + * + * @author Irina Dragoste + * + */ +public interface Predicate extends Entity { + + /** + * The name of the Predicate. + * + * @return the name of the Predicate. + */ + String getName(); + + /** + * The arity represents the number of terms allowed as relation arguments for + * this Predicate. For example, a Predicate with name {@code P} and arity + * {@code n} allows atomic formulae of the form {@code P(t1,...,tn)}. + * + * @return the arity of the Predicate. + */ + int getArity(); + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java index 582493c69..54843893b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,6 +20,10 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for terms. A term is characterized by a string name and a * {@link TermType}. @@ -32,21 +36,21 @@ public interface Term extends Entity { /** * Returns the name this term. The name uniquely identifies terms of the same * {@link TermType}. - * + * * @return a non-blank String (not null, nor empty or whitespace). */ String getName(); /** * Return the type of this term. - * + * * @return the type of this term */ TermType getType(); /** * Returns true if the term represents some kind of constant. - * + * * @return true if term is constant */ default boolean isConstant() { @@ -56,7 +60,7 @@ default boolean isConstant() { /** * Returns true if the term represents some kind of variable. - * + * * @return true if term is variable */ default boolean isVariable() { @@ -71,4 +75,13 @@ default boolean isVariable() { */ T accept(TermVisitor termVisitor); + /** + * Return the parseable string representation of this Term, transforming IRIs. + * + * @param iriTransformer a function that is applied to transform any IRIs + * occurring in the output. + * + * @return non-empty String containing the representation. + */ + String getSyntacticRepresentation(Function iriTransformer); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java index 975620a03..66e2d38a3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /* * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +20,10 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for universally quantified variables, i.e., variables that appear * in the scope of an (implicit) universal quantifier in a rule. 
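The hunks above thread an IRI-transforming function through `getSyntacticRepresentation` for all term types: constants apply it, while variables and named nulls return their plain representation. A minimal usage sketch; the `Function<String, String>` type arguments are inferred here (the patch text does not show the generics), and the abbreviation function merely stands in for `KnowledgeBase::unresolveAbsoluteIri` as used later in `VLogReasoner.writeInferences`:

```
import java.util.function.Function;

import org.semanticweb.vlog4j.core.model.api.AbstractConstant;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;

public class IriTransformerSketch {
	public static void main(String[] args) {
		final AbstractConstant c = Expressions.makeAbstractConstant("http://example.org/c");

		// identity transformer: the absolute IRI is kept and written in angle brackets
		System.out.println(c.getSyntacticRepresentation(Function.identity()));

		// hypothetical abbreviation function; a transformed IRI is written as-is
		final Function<String, String> abbreviate = iri -> iri.startsWith("http://example.org/")
				? "eg:" + iri.substring("http://example.org/".length())
				: iri;
		System.out.println(c.getSyntacticRepresentation(abbreviate)); // eg:c
	}
}
```

Passing `Function.identity()` reproduces the behaviour of the no-argument `getSyntacticRepresentation()`.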
@@ -39,4 +41,9 @@ default TermType getType() { default String getSyntacticRepresentation() { return Serializer.getString(this); } + + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return getSyntacticRepresentation(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java index fbdf49b59..dbea5e454 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -104,10 +104,14 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE public String unresolveAbsoluteIri(String iri) { Map matches = new HashMap<>(); - prefixes.forEach((prefixName, baseIri) -> { + if (baseUri != EMPTY_BASE_PREFIX && iri.startsWith(baseUri) && !iri.equals(baseUri)) { + matches.put(iri.replaceFirst(baseUri, ""), baseUri.length()); + } + + prefixes.forEach((prefixName, prefixIri) -> { // only select proper prefixes here, since `eg:` is not a valid prefixed name. - if (iri.startsWith(baseIri) && !iri.equals(baseIri)) { - matches.put(iri.replaceFirst(baseIri, prefixName), baseIri.length()); + if (iri.startsWith(prefixIri) && !iri.equals(prefixIri)) { + matches.put(iri.replaceFirst(prefixIri, prefixName), prefixIri.length()); } }); @@ -150,6 +154,8 @@ public Iterator iterator() { * @return this */ public MergeablePrefixDeclarations mergePrefixDeclarations(final PrefixDeclarations other) { + this.setBase(other.getBase()); + for (String prefixName : other) { String iri; try { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index b9f3c2b51..8d0325fd1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,7 +1,9 @@ package org.semanticweb.vlog4j.core.model.implementation; import java.util.List; +import java.util.function.Function; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; /*- @@ -38,6 +40,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; @@ -65,6 +68,8 @@ public final class Serializer { public static final String RULE_SEPARATOR = " :- "; public static final char AT = '@'; public static final String DATA_SOURCE = "@source "; + public static final String BASE = "@base "; + public static final String PREFIX = "@prefix "; public static final String CSV_FILE_DATA_SOURCE = "load-csv"; public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; @@ -148,6 +153,18 @@ public static String getFactString(final Fact fact) { return getString(fact) + 
STATEMENT_SEPARATOR; } + /** + * Creates a String representation of a given {@link Constant}. + * + * @see Rule syntax . + * @param constant a {@link Constant} + * @param iriTransformer a function to transform IRIs with. + * @return String representation corresponding to a given {@link Constant}. + */ + public static String getString(final AbstractConstant constant, Function iriTransformer) { + return getIRIString(constant.getName(), iriTransformer); + } + /** * Creates a String representation of a given {@link Constant}. * @@ -178,23 +195,49 @@ public static String getConstantName(final LanguageStringConstant languageString * * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} + * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given * {@link DatatypeConstant}. */ - public static String getString(final DatatypeConstant datatypeConstant) { + public static String getString(final DatatypeConstant datatypeConstant, Function iriTransformer) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { return getString(datatypeConstant.getLexicalValue()); + } else if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { + return datatypeConstant.getLexicalValue(); } else { - if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { - return datatypeConstant.getLexicalValue(); - } else { - return getConstantName(datatypeConstant); - } + return getConstantName(datatypeConstant, iriTransformer); } } + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} without an IRI. + * + * @see Rule syntax . + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(final DatatypeConstant datatypeConstant) { + return getString(datatypeConstant, Function.identity()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see Rule syntax . + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getConstantName(final DatatypeConstant datatypeConstant, Function iriTransformer) { + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + + getIRIString(datatypeConstant.getDatatype(), iriTransformer); + } + /** * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. 
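The serializer changes above special-case `xsd:string` (written as a quoted string) and the numeric types `xsd:integer`, `xsd:decimal` and `xsd:double` (written by their bare lexical value), while any other datatype keeps the full `"lexical"^^<datatypeIRI>` form. A small sketch of the resulting output, assuming the `Expressions.makeDatatypeConstant(lexicalValue, datatypeIri)` factory, which is not part of this patch:

```
import org.semanticweb.vlog4j.core.model.api.DatatypeConstant;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;

public class DatatypeSerializationSketch {
	public static void main(String[] args) {
		// xsd:string values are written as quoted strings, e.g. "abc"
		final DatatypeConstant str = Expressions.makeDatatypeConstant("abc",
				"http://www.w3.org/2001/XMLSchema#string");
		// xsd:integer (and decimal/double) values are written bare, e.g. 42
		final DatatypeConstant num = Expressions.makeDatatypeConstant("42",
				"http://www.w3.org/2001/XMLSchema#integer");
		// other datatypes keep the explicit form "2020-02-28"^^<http://www.w3.org/2001/XMLSchema#date>
		final DatatypeConstant date = Expressions.makeDatatypeConstant("2020-02-28",
				"http://www.w3.org/2001/XMLSchema#date");

		System.out.println(str.getSyntacticRepresentation());
		System.out.println(num.getSyntacticRepresentation());
		System.out.println(date.getSyntacticRepresentation());
	}
}
```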
@@ -318,6 +361,16 @@ private static String getFileString(final FileDataSource fileDataSource) { } private static String getIRIString(final String string) { + return getIRIString(string, Function.identity()); + } + + private static String getIRIString(final String string, Function iriTransformer) { + String transformed = iriTransformer.apply(string); + + if (!transformed.equals(string)) { + return transformed; + } + if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { return addAngleBrackets(string); @@ -382,12 +435,20 @@ private static String addAngleBrackets(final String string) { public static String getFactString(Predicate predicate, List terms) { return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; + } + public static String getFactString(Predicate predicate, List terms, Function iriTransformer) { + return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + "\n"; } public static String getString(Predicate predicate, List terms) { - final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName())); + return getString(predicate, terms, Function.identity()); + } + + public static String getString(Predicate predicate, List terms, Function iriTransformer) { + final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); stringBuilder.append(OPENING_PARENTHESIS); + boolean first = true; for (final Term term : terms) { if (first) { @@ -395,12 +456,41 @@ public static String getString(Predicate predicate, List terms) { } else { stringBuilder.append(COMMA); } - final String string = term.getSyntacticRepresentation(); + final String string = term.getSyntacticRepresentation(iriTransformer); stringBuilder.append(string); } stringBuilder.append(CLOSING_PARENTHESIS); return stringBuilder.toString(); + } + + public static String getBaseString(KnowledgeBase knowledgeBase) { + String baseIri = knowledgeBase.getBase(); + + if (baseIri.equals("")) { + return ""; + } + return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + "\n"; } + public static String getPrefixString(String prefixName, String prefixIri) { + return PREFIX + prefixName + " " + addAngleBrackets(prefixIri) + STATEMENT_SEPARATOR + "\n"; + } + + public static String getBaseAndPrefixDeclarations(KnowledgeBase knowledgeBase) { + StringBuilder sb = new StringBuilder(); + + sb.append(getBaseString(knowledgeBase)); + + knowledgeBase.getPrefixes().forEachRemaining((String prefixName) -> { + try { + sb.append(getPrefixString(prefixName, knowledgeBase.getPrefix(prefixName))); + } catch (PrefixDeclarationException e) { + // this shouldn't throw, since we're iterating over known prefixes. + throw new RuntimeException(e); + } + }); + + return sb.toString(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index b8735c4fd..92d3b779c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -508,6 +508,15 @@ public String getBase() { return this.prefixDeclarations.getBase(); } + /* + * Return the declared prefixes. + * + * @return an iterator over all known prefixes. 
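Together with the new `KnowledgeBase.getPrefixes()` below, `Serializer.getBaseAndPrefixDeclarations` lets `writeInferences` prepend the known `@base` and `@prefix` declarations and abbreviate IRIs in the emitted facts. A usage sketch; the output shown in the comments is indicative only:

```
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class WriteInferencesSketch {
	public static void main(String[] args) throws IOException {
		final KnowledgeBase kb = new KnowledgeBase();
		// ... add facts, rules, and prefix declarations to the knowledge base ...

		try (Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.reason();
			final ByteArrayOutputStream out = new ByteArrayOutputStream();
			reasoner.writeInferences(out);
			// The dump starts with the declarations known to the knowledge base, roughly:
			//   @base <http://example.org/> .
			//   @prefix eg: <http://example.org/> .
			// followed by one (abbreviated) fact per line, e.g. eg:s(eg:c) .
			System.out.print(out.toString());
		}
	}
}
```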
+ */ + public Iterator getPrefixes() { + return this.prefixDeclarations.iterator(); + } + /** * Resolve {@code prefixName} into the declared IRI. * diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 55985c1e2..968c53b51 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -1,6 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner; import java.io.FileNotFoundException; +import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; @@ -83,7 +84,7 @@ static Reasoner getInstance() { /** * Getter for the knowledge base to reason on. - * + * * @return the reasoner's knowledge base */ KnowledgeBase getKnowledgeBase(); @@ -91,7 +92,7 @@ static Reasoner getInstance() { /** * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. - * + * * @param an OutputStream for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. @@ -103,13 +104,17 @@ static Reasoner getInstance() { * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to a desired file. * - * @param a String of the file path for the facts to be written to. + * @param filePath a String of the file path for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException * @throws FileNotFoundException */ - Correctness writeInferences(String filePath) throws FileNotFoundException, IOException; + default Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { + try (OutputStream stream = new FileOutputStream(filePath)) { + return writeInferences(stream); + } + } /** * Sets the algorithm that will be used for reasoning over the knowledge base. @@ -203,7 +208,7 @@ static Reasoner getInstance() { /** * Checks whether the loaded rules and loaded fact EDB predicates are Acyclic, * Cyclic, or cyclicity cannot be determined. - * + * * @return the appropriate CyclicityResult. */ CyclicityResult checkForCycles(); @@ -215,7 +220,7 @@ static Reasoner getInstance() { * predicates, reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, * implicitly, the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will * always terminate. - * + * * @return {@code true}, if the loaded set of rules is Joint Acyclic with * respect to the EDB predicates of loaded facts.
              * {@code false}, otherwise @@ -228,7 +233,7 @@ static Reasoner getInstance() { * RJA, then, for the given set of rules and any facts over the given EDB * predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE Restricted chase} * will always terminate - * + * * @return {@code true}, if the loaded set of rules is Restricted Joint Acyclic * with respect to the EDB predicates of loaded facts.
              * {@code false}, otherwise @@ -242,7 +247,7 @@ static Reasoner getInstance() { * predicates, reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, * implicitly, the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will * always terminate - * + * * @return {@code true}, if the loaded set of rules is Model-Faithful Acyclic * with respect to the EDB predicates of loaded facts.
              * {@code false}, otherwise @@ -256,7 +261,7 @@ static Reasoner getInstance() { * over the given EDB predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE * Restricted chase} will always terminate. If a set of rules and EDB predicates * is MFA, then it is also JA. - * + * * @return {@code true}, if the loaded set of rules is Restricted Model-Faithful * Acyclic with respect to the EDB predicates of loaded facts.
              * {@code false}, otherwise @@ -271,7 +276,7 @@ static Reasoner getInstance() { * is guaranteed not to terminate for the loaded rules. If a set of rules and * EDB predicates is RMFA, then it is also RJA. Therefore, if a set or rules and * EDB predicates is MFC, it is not MFA, nor JA. - * + * * @return {@code true}, if the loaded set of rules is Model-Faithful Cyclic * with respect to the EDB predicates of loaded facts.
              * {@code false}, otherwise @@ -290,7 +295,7 @@ static Reasoner getInstance() { * more cases.
              * To avoid non-termination, a reasoning timeout can be set * ({@link Reasoner#setReasoningTimeout(Integer)}).
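The termination-related methods documented above can be combined: `checkForCycles()` gives an aggregated acyclicity verdict, and a timeout bounds materialisation when termination cannot be guaranteed. A sketch, assuming that `CyclicityResult` lives in the `core.reasoner` package and that the timeout is given in seconds:

```
import java.io.IOException;

import org.semanticweb.vlog4j.core.reasoner.CyclicityResult;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class TerminationSketch {
	public static void main(String[] args) throws IOException {
		final KnowledgeBase kb = new KnowledgeBase();
		// ... add existential rules and facts ...

		try (Reasoner reasoner = new VLogReasoner(kb)) {
			// aggregated verdict over the acyclicity/cyclicity notions described above
			final CyclicityResult cyclicity = reasoner.checkForCycles();
			System.out.println("Cyclicity: " + cyclicity);

			// guard against non-termination before materialising
			reasoner.setReasoningTimeout(60);
			final boolean completed = reasoner.reason();
			System.out.println(completed ? "materialisation completed" : "materialisation interrupted");
		}
	}
}
```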
              - * + * * @return *
                *
              • {@code true}, if materialisation reached completion.
              • @@ -315,7 +320,7 @@ static Reasoner getInstance() { * A query answer is represented by a {@link QueryResult}. A query can have * multiple, distinct query answers. This method returns an Iterator over these * answers.
                - * + * * Depending on the state of the reasoning (materialisation) and its * {@link KnowledgeBase}, the answers can have a different {@link Correctness} * ({@link QueryResultIterator#getCorrectness()}): @@ -340,7 +345,7 @@ static Reasoner getInstance() { * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required, * in order to obtain correct query answers. *
              - * + * * * @param query a {@link PositiveLiteral} representing the query to be * answered. @@ -360,7 +365,7 @@ static Reasoner getInstance() { * * Evaluates an atomic ({@code query}), and counts the number of query answer * implicit facts loaded into the reasoner and the number of query answer * explicit facts materialised by the reasoner. - * + * * @param query a {@link PositiveLiteral} representing the query to be answered. * * @return a {@link QueryAnswerCount} object that contains the query answers @@ -387,7 +392,7 @@ default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { * the {@code query} are matched by terms in the fact, either named * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The * same variable name identifies the same term in the answer fact.
              - * + * * Depending on the state of the reasoning (materialisation) and its * {@link KnowledgeBase}, the answers can have a different {@link Correctness} * ({@link QueryResultIterator#getCorrectness()}): @@ -412,14 +417,14 @@ default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required, * in order to obtain correct query answers. *
            - * + * * * @param query a {@link PositiveLiteral} representing the query to be * answered. * @param includeNulls if {@code true}, facts with {@link TermType#NAMED_NULL} * terms will be counted. Otherwise, facts with * {@link TermType#NAMED_NULL} terms will be ignored. - * + * * @return a {@link QueryAnswerCount} object that contains the query answers * Correctness and the number query answers, i.e. the number of facts in * the extension of the query. @@ -482,7 +487,7 @@ default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { * ({@link Reasoner#reason()}) is required, in order to obtain correct * query answers. *
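A sketch of the query-answering workflow described in the javadoc above. It assumes the `answerQuery(query, includeNulls)` signature, a `getCount()` accessor on `QueryAnswerCount`, and that `QueryResultIterator` and `QueryResult` (with `getCorrectness()` and `getTerms()`) are importable from the `core.reasoner` package; none of these details are shown verbatim in this patch:

```
import java.io.IOException;

import org.semanticweb.vlog4j.core.model.api.PositiveLiteral;
import org.semanticweb.vlog4j.core.model.implementation.Expressions;
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator;
import org.semanticweb.vlog4j.core.reasoner.Reasoner;
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner;

public class QueryAnsweringSketch {
	public static void main(String[] args) throws IOException {
		final KnowledgeBase kb = new KnowledgeBase();
		// ... add rules and facts ...

		try (Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.reason();

			final PositiveLiteral query = Expressions.makePositiveLiteral("LocatedIn",
					Expressions.makeUniversalVariable("X"), Expressions.makeUniversalVariable("Y"));

			// count the answers, including facts that contain named nulls
			System.out.println("Answers: " + reasoner.countQueryAnswers(query).getCount());

			// iterate over the answers; the iterator also reports their correctness
			final QueryResultIterator answers = reasoner.answerQuery(query, true);
			System.out.println("Correctness: " + answers.getCorrectness());
			while (answers.hasNext()) {
				System.out.println(answers.next().getTerms());
			}
		}
	}
}
```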
          - * + * */ Correctness exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeNulls) throws IOException; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 690eb2019..ebbf50ad1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -10,6 +10,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; @@ -464,6 +465,8 @@ public Correctness writeInferences(OutputStream stream) throws IOException { } final Set toBeQueriedHeadPredicates = getKnowledgeBasePredicates(); + stream.write(Serializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); + for (final Predicate predicate : toBeQueriedHeadPredicates) { final PositiveLiteral queryAtom = getQueryAtom(predicate); final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); @@ -471,7 +474,8 @@ public Correctness writeInferences(OutputStream stream) throws IOException { while (answers.hasNext()) { final karmaresearch.vlog.Term[] vlogTerms = answers.next(); final List termList = VLogToModelConverter.toTermList(vlogTerms); - stream.write(Serializer.getFactString(predicate, termList).getBytes()); + stream.write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri) + .getBytes()); } } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); @@ -484,13 +488,6 @@ public Correctness writeInferences(OutputStream stream) throws IOException { return this.correctness; } - @Override - public Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { - try (OutputStream stream = new FileOutputStream(filePath)) { - return writeInferences(stream); - } - } - private void logWarningOnCorrectness() { if (this.correctness != Correctness.SOUND_AND_COMPLETE) { LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java index a83ff8f3e..f515cd6db 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -48,33 +48,33 @@ public void init() { @Test public void setBase_changingBase_succeeds() { prefixDeclarations.setBase(BASE); - assertEquals(prefixDeclarations.getBase(), BASE); + assertEquals(BASE, prefixDeclarations.getBase()); prefixDeclarations.setBase(MORE_SPECIFIC); - assertEquals(prefixDeclarations.getBase(), MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getBase()); } @Test public void setBase_redeclareSameBase_succeeds() { prefixDeclarations.setBase(BASE); - assertEquals(prefixDeclarations.getBase(), BASE); + assertEquals(BASE, prefixDeclarations.getBase()); prefixDeclarations.setBase(BASE); - assertEquals(prefixDeclarations.getBase(), BASE); + assertEquals(BASE, 
prefixDeclarations.getBase()); } @Test public void absolutize_noBase_identical() { - assertEquals(prefixDeclarations.absolutize(RELATIVE), RELATIVE); + assertEquals(RELATIVE, prefixDeclarations.absolutize(RELATIVE)); } @Test public void absolutize_base_absoluteIri() { prefixDeclarations.setBase(BASE); - assertEquals(prefixDeclarations.absolutize(RELATIVE), BASE + RELATIVE); + assertEquals(BASE + RELATIVE, prefixDeclarations.absolutize(RELATIVE)); } @Test public void absolutize_absoluteIri_identical() { - assertEquals(prefixDeclarations.absolutize(BASE), BASE); + assertEquals(BASE, prefixDeclarations.absolutize(BASE)); } @Test(expected = PrefixDeclarationException.class) @@ -85,7 +85,7 @@ public void resolvePrefixedName_undeclaredPrefix_throws() throws PrefixDeclarati @Test public void resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.setPrefix("eg:", BASE); - assertEquals(prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE), BASE + RELATIVE); + assertEquals(BASE + RELATIVE, prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE)); } @Test @@ -94,13 +94,14 @@ public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws Pref prefixDeclarations.setPrefix(prefix, BASE); String resolved = BASE + RELATIVE; String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved); - assertEquals(prefixDeclarations.resolvePrefixedName(unresolved), resolved); + assertEquals(resolved, prefixDeclarations.resolvePrefixedName(unresolved)); } @Test - public void setPrefix_redeclarePrefix_succeeds() { + public void setPrefix_redeclarePrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.setPrefix("eg:", BASE); prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); + assertEquals(BASE, prefixDeclarations.getPrefix("eg:")); } @Test @@ -110,14 +111,14 @@ public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDe prefixDeclarations.setPrefix("eg:", BASE); prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); - assertEquals(prefixDeclarations.getPrefix(prefix + "1:"), MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefix(prefix + "1:")); } @Test public void mergeablePrefixDeclarations_constructor_succeeds() throws PrefixDeclarationException { this.prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(this.prefixDeclarations); - assertEquals(prefixDeclarations.getPrefix("eg:"), MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefix("eg:")); } @Test(expected = RuntimeException.class) @@ -132,42 +133,49 @@ public void mergePrefixDeclarations_getPrefixUnexpectedlyThrows_throws() throws @Test public void unresolveAbsoluteIri_default_identical() { - assertEquals(prefixDeclarations.unresolveAbsoluteIri(BASE), BASE); + assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); } @Test public void unresolveAbsoluteIri_declaredPrefix_succeeds() { - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); prefixDeclarations.setPrefix("eg:", BASE); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), "eg:example/"); + assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_unrelatedPrefix_identical() { prefixDeclarations.setPrefix("eg:", UNRELATED); - 
assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_unrelatedAndRelatedPrefixes_succeeds() { prefixDeclarations.setPrefix("ex:", UNRELATED); prefixDeclarations.setPrefix("eg:", BASE); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), "eg:example/"); + assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_multipleMatchingPrefixes_longestMatchWins() { prefixDeclarations.setPrefix("eg:", BASE); prefixDeclarations.setPrefix("ex:", MORE_SPECIFIC); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE), "ex:" + RELATIVE); + assertEquals("ex:" + RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); prefixDeclarations.setPrefix("er:", EVEN_MORE_SPECIFIC); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE), "er:test"); + assertEquals("er:test", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); } @Test public void unresolveAbsoluteIri_exactPrefixMatch_identical() { prefixDeclarations.setPrefix("eg:", BASE); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(BASE), BASE); + assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); + } + + @Test + public void unresolveAbsoluteIri_baseIsMoreSpecific_baseWins() { + prefixDeclarations.setBase(MORE_SPECIFIC); + prefixDeclarations.setPrefix("eg:", BASE); + assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); } @Test @@ -176,6 +184,6 @@ public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws Pref prefixDeclarations.setPrefix(prefix, BASE); String unresolved = prefix + RELATIVE; String resolved = prefixDeclarations.resolvePrefixedName(unresolved); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(resolved), unresolved); + assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved)); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java new file mode 100644 index 000000000..4ef6212c1 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -0,0 +1,133 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.Before; +import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import 
org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +public class VLogReasonerWriteInferencesTest { + private final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); + private final Fact fact = Expressions.makeFact("http://example.org/s", c); + private final AbstractConstant dresdenConst = Expressions.makeAbstractConstant("dresden"); + private final Predicate locatedInPred = Expressions.makePredicate("LocatedIn", 2); + private final Predicate addressPred = Expressions.makePredicate("address", 4); + private final Predicate universityPred = Expressions.makePredicate("university", 2); + private final UniversalVariable varX = Expressions.makeUniversalVariable("X"); + private final UniversalVariable varY = Expressions.makeUniversalVariable("Y"); + private final PositiveLiteral pl1 = Expressions.makePositiveLiteral(locatedInPred, varX, varY); + private final PositiveLiteral pl2 = Expressions.makePositiveLiteral("location", varX, varY); + private final PositiveLiteral pl3 = Expressions.makePositiveLiteral(addressPred, varX, + Expressions.makeExistentialVariable("Y"), Expressions.makeExistentialVariable("Z"), + Expressions.makeExistentialVariable("Q")); + private final PositiveLiteral pl4 = Expressions.makePositiveLiteral(locatedInPred, + Expressions.makeExistentialVariable("Q"), Expressions.makeUniversalVariable("F")); + private final PositiveLiteral pl5 = Expressions.makePositiveLiteral(universityPred, varX, + Expressions.makeUniversalVariable("F")); + private final Conjunction conjunction = Expressions.makePositiveConjunction(pl3, pl4); + private final Rule rule1 = Expressions.makeRule(pl1, pl2); + private final Rule rule2 = Expressions.makeRule(conjunction, Expressions.makeConjunction(pl5)); + private final Fact f1 = Expressions.makeFact(locatedInPred, Expressions.makeAbstractConstant("Egypt"), + Expressions.makeAbstractConstant("Africa")); + private final Fact f2 = Expressions.makeFact(addressPred, Expressions.makeAbstractConstant("TSH"), + Expressions.makeAbstractConstant("Pragerstraße13"), Expressions.makeAbstractConstant("01069"), + dresdenConst); + private final Fact f3 = Expressions.makeFact("city", dresdenConst); + private final Fact f4 = Expressions.makeFact("country", Expressions.makeAbstractConstant("germany")); + private final Fact f5 = Expressions.makeFact(universityPred, Expressions.makeAbstractConstant("tudresden"), + Expressions.makeAbstractConstant("germany")); + private final InMemoryDataSource locations = new InMemoryDataSource(2, 1); + private KnowledgeBase kb; + + @Before + public void initKb() { + kb = new KnowledgeBase(); + kb.addStatement(fact); + kb.addStatements(rule1, rule2, f1, 
f2, f3, f4, f5); + locations.addTuple("dresden", "germany"); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); + } + + @Test + public void writeInferences_example_succeeds() throws IOException { + assertEquals(10, getInferences().size()); + } + + @Test + public void writeInferences_withPrefixDeclarations_abbreviatesIris() + throws IOException, PrefixDeclarationException { + PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); + when(prefixDeclarations.getBase()).thenReturn(""); + when(prefixDeclarations.getPrefix(eq("eg:"))).thenReturn("http://example.org/"); + when(prefixDeclarations.iterator()).thenReturn(Arrays.asList("eg:").iterator()); + kb.mergePrefixDeclarations(prefixDeclarations); + + assertEquals(11, getInferences().size()); + assertTrue("the abbreviated fact is present", getInferences().contains("eg:s(eg:c) .")); + } + + @Test + public void writeInferences_withBase_writesBase() throws IOException, PrefixDeclarationException { + PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); + when(prefixDeclarations.getBase()).thenReturn("http://example.org/"); + when(prefixDeclarations.iterator()).thenReturn(Arrays.asList().iterator()); + kb.mergePrefixDeclarations(prefixDeclarations); + + assertEquals(11, getInferences().size()); + assertTrue("the base declaration is present", getInferences().contains("@base .")); + } + + private List getInferences() throws IOException { + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + reasoner.writeInferences(stream); + stream.flush(); + + Stream inferences = Arrays.stream(stream.toString().split("(?<=[>)]\\s*)\\.\\s*")); + + return inferences.map((String inference) -> inference + ".").collect(Collectors.toList()); + } + } +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java deleted file mode 100644 index 73af579c0..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java +++ /dev/null @@ -1,101 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import static org.junit.Assert.assertEquals; - -import java.io.BufferedReader; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.StringReader; -import java.util.ArrayList; -import java.util.List; -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you 
may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -public class VlogReasonerWriteInferencesTest { - final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); - final Fact fact = Expressions.makeFact("http://example.org/s", c); - final AbstractConstant dresdenConst = Expressions.makeAbstractConstant("dresden"); - final Predicate locatedInPred = Expressions.makePredicate("LocatedIn", 2); - final Predicate addressPred = Expressions.makePredicate("address", 4); - final Predicate universityPred = Expressions.makePredicate("university", 2); - final UniversalVariable varX = Expressions.makeUniversalVariable("X"); - final UniversalVariable varY = Expressions.makeUniversalVariable("Y"); - final PositiveLiteral pl1 = Expressions.makePositiveLiteral(locatedInPred, varX, varY); - final PositiveLiteral pl2 = Expressions.makePositiveLiteral("location", varX, varY); - final PositiveLiteral pl3 = Expressions.makePositiveLiteral(addressPred, varX, - Expressions.makeExistentialVariable("Y"), Expressions.makeExistentialVariable("Z"), - Expressions.makeExistentialVariable("Q")); - final PositiveLiteral pl4 = Expressions.makePositiveLiteral(locatedInPred, Expressions.makeExistentialVariable("Q"), - Expressions.makeUniversalVariable("F")); - final PositiveLiteral pl5 = Expressions.makePositiveLiteral(universityPred, varX, - Expressions.makeUniversalVariable("F")); - final Conjunction conjunction = Expressions.makePositiveConjunction(pl3, pl4); - final Rule rule1 = Expressions.makeRule(pl1, pl2); - final Rule rule2 = Expressions.makeRule(conjunction, Expressions.makeConjunction(pl5)); - final Fact f1 = Expressions.makeFact(locatedInPred, Expressions.makeAbstractConstant("Egypt"), - Expressions.makeAbstractConstant("Africa")); - final Fact f2 = Expressions.makeFact(addressPred, Expressions.makeAbstractConstant("TSH"), - Expressions.makeAbstractConstant("Pragerstraße13"), Expressions.makeAbstractConstant("01069"), - dresdenConst); - final Fact f3 = Expressions.makeFact("city", dresdenConst); - final Fact f4 = Expressions.makeFact("country", Expressions.makeAbstractConstant("germany")); - final Fact f5 = Expressions.makeFact(universityPred, Expressions.makeAbstractConstant("tudresden"), - Expressions.makeAbstractConstant("germany")); - final InMemoryDataSource locations = new InMemoryDataSource(2, 1); - - @Test - public void testWriteInferences() throws IOException { - KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(fact); - kb.addStatements(rule1, rule2, f1, f2, f3, f4, f5); - locations.addTuple("dresden", "germany"); - kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); - List inferences = new ArrayList(); - try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - ByteArrayOutputStream stream = new ByteArrayOutputStream(); - reasoner.writeInferences(stream); - stream.flush(); - try (BufferedReader input = new BufferedReader(new StringReader(stream.toString()))) { - String factString = ""; - while ((factString = input.readLine()) != null) { - inferences.add(factString); - } 
- - } - assertEquals(10, inferences.size()); - } - - } -} From 43d4ec3b19c274a396d71933e6fc824cd70bca4c Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 18 Feb 2020 15:49:52 +0100 Subject: [PATCH 0559/1003] Core: Fix VLogReasonerWriteInferenceTest on Java 8 Java 8 regexes do not allow unbounded lookbehind. --- .../implementation/VLogReasonerWriteInferencesTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java index 4ef6212c1..036335328 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -125,7 +125,7 @@ private List getInferences() throws IOException { reasoner.writeInferences(stream); stream.flush(); - Stream inferences = Arrays.stream(stream.toString().split("(?<=[>)]\\s*)\\.\\s*")); + Stream inferences = Arrays.stream(stream.toString().split("(?<=[>)]\\s?)\\.\\s*")); return inferences.map((String inference) -> inference + ".").collect(Collectors.toList()); } From 5757c2b3ce0ce0c97750ae952927d588e870a24d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 21 Feb 2020 14:57:40 +0100 Subject: [PATCH 0560/1003] Core: Simplify sorting in MergeablePrefixDeclarations --- .../model/implementation/MergeablePrefixDeclarations.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java index dbea5e454..6365a77a9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -22,6 +22,7 @@ import java.net.URI; import java.util.ArrayList; +import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -116,10 +117,8 @@ public String unresolveAbsoluteIri(String iri) { }); List matchesByLength = new ArrayList<>(matches.keySet()); - matchesByLength.sort((left, right) -> { - // inverse order, so we get the longest match first - return matches.get(right).compareTo(matches.get(left)); - }); + // reverse order, so we get the longest match first + matchesByLength.sort(Comparator.comparing(matches::get).reversed()); if (matchesByLength.size() > 0) { return matchesByLength.get(0); From 2e52f864a5b63108c269071c03af5e1c743ac1e4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 28 Feb 2020 19:10:50 +0100 Subject: [PATCH 0561/1003] Core: Refactor handling of prefix declarations --- .../PrefixDeclarationException.java | 3 - .../core/exceptions/VLog4jException.java | 11 +- .../model/api/LanguageStringConstant.java | 2 +- ...ns.java => PrefixDeclarationRegistry.java} | 49 ++++- .../AbstractPrefixDeclarationRegistry.java | 94 +++++++++ .../MergeablePrefixDeclarations.java | 183 ------------------ .../MergingPrefixDeclarationRegistry.java | 151 +++++++++++++++ .../core/model/implementation/Serializer.java | 36 ++-- .../vlog4j/core/reasoner/KnowledgeBase.java | 26 +-- 
...MergingPrefixDeclarationRegistryTest.java} | 109 +++++------ .../vlog4j/core/model/TermImplTest.java | 6 +- .../core/reasoner/KnowledgeBaseTest.java | 8 +- .../VLogReasonerWriteInferencesTest.java | 23 ++- .../vlog4j/parser/DirectiveHandler.java | 8 +- .../LocalPrefixDeclarationRegistry.java | 94 +++++++++ .../parser/LocalPrefixDeclarations.java | 118 ----------- .../vlog4j/parser/ParserConfiguration.java | 4 +- .../vlog4j/parser/ParsingException.java | 15 +- .../semanticweb/vlog4j/parser/RuleParser.java | 8 +- .../ImportFileRelativeDirectiveHandler.java | 8 +- .../vlog4j/parser/javacc/JavaCCParser.jj | 8 +- .../parser/javacc/JavaCCParserBase.java | 26 +-- .../parser/javacc/SubParserFactory.java | 8 +- .../parser/RuleParserParseFactTest.java | 6 +- .../vlog4j/parser/RuleParserTest.java | 32 +-- .../vlog4j/rdf/RdfValueToTermConverter.java | 4 +- .../vlog4j/rdf/TestConvertRdfFileToFacts.java | 4 +- 27 files changed, 550 insertions(+), 494 deletions(-) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/{PrefixDeclarations.java => PrefixDeclarationRegistry.java} (58%) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java delete mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/{MergeablePrefixDeclarationsTest.java => MergingPrefixDeclarationRegistryTest.java} (54%) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java delete mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java index 7c209c5cf..78cb523c1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java @@ -21,9 +21,6 @@ */ public class PrefixDeclarationException extends VLog4jException { - /** - * - */ private static final long serialVersionUID = 1L; public PrefixDeclarationException(String errorMessage) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java index efcb1bf29..c87c6ca3b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -32,6 +32,10 @@ public class VLog4jException extends Exception { */ private static final long serialVersionUID = 8305375071519734590L; + public VLog4jException(Throwable cause) { + super(cause); + } + public VLog4jException(String message, Throwable cause) { super(message, cause); } @@ -40,4 +44,7 @@ public VLog4jException(String message) { super(message); } + public VLog4jException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java index c38b0f901..0f1296d0f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java @@ -46,7 +46,7 @@ default TermType getType() { * @return a IRI of RDF langString datatype */ default String getDatatype() { - return PrefixDeclarations.RDF_LANGSTRING; + return PrefixDeclarationRegistry.RDF_LANGSTRING; } /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java similarity index 58% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java index 942c10a60..ae4c1f6c4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; - /*- * #%L * vlog4j-syntax @@ -22,13 +20,17 @@ * #L% */ +import java.util.Map.Entry; + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; + /** * Registry that manages prefixes and base namespace declarations as used for * parsing and serialising inputs. * * @author Markus Kroetzsch */ -public interface PrefixDeclarations extends Iterable { +public interface PrefixDeclarationRegistry extends Iterable> { static final String XSD = "http://www.w3.org/2001/XMLSchema#"; static final String XSD_STRING = "http://www.w3.org/2001/XMLSchema#string"; @@ -39,28 +41,57 @@ public interface PrefixDeclarations extends Iterable { static final String XSD_BOOLEAN = "http://www.w3.org/2001/XMLSchema#boolean"; static final String RDF_LANGSTRING = "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString"; + static final String EMPTY_BASE = ""; + static final String PREFIX_NAME_SEPARATOR = ":"; + /** * Returns the relevant base namespace. This should always return a result, * possibly using a local default value if no base was declared. * * @return string of an absolute base IRI */ - String getBase(); + String getBaseIri(); /** * Sets the base namespace to the given value. This should only be done once, * and not after the base namespace was assumed to be an implicit default value. 
* - * @param base the new base namespace + * @param baseIri the new base namespace * @throws PrefixDeclarationException if base was already defined */ - void setBase(String base) throws PrefixDeclarationException; + void setBaseIri(String baseIri) throws PrefixDeclarationException; - String getPrefix(String prefix) throws PrefixDeclarationException; + /** + * Returns the IRI associated with a given prefix name. + * + * @param prefixName the name of the prefix. + * @throws PrefixDeclarationException if prefixName was not defined. + */ + String getPrefixIri(String prefixName) throws PrefixDeclarationException; - void setPrefix(String prefix, String iri) throws PrefixDeclarationException; + /** + * Registers a prefix declaration. Behaviour is implementation-defined if + * prefixName has already been registered. + * + * @param prefixName the name of the prefix. + * @param prefixIri the IRI of the prefix. + * + * @throws PrefixDeclarationException when prefixName is already registered, at + * the discretion of the implementation. + */ + void setPrefixIri(String prefixName, String prefixIri) throws PrefixDeclarationException; + /** + * Turn a prefixed name into an absolute IRI. + */ String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException; - String absolutize(String prefixedName) throws PrefixDeclarationException; + /** + * Turn a prefixed name or a potentially relative IRI into an absolute IRI. + * + * @param potentiallyRelativeIri either a prefixedName or an IRI. + * @throws PrefixDeclarationException when called on a prefixedName using an + * unknown prefixName. + */ + String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarationException; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java new file mode 100644 index 000000000..d41bfabfb --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -0,0 +1,94 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +/*- + * #%L + * vlog4j-syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; + +/** + * Implementation of the common logic for prefix declaration registries. + * + * @author Maximilian Marx + */ +public abstract class AbstractPrefixDeclarationRegistry implements PrefixDeclarationRegistry { + /** + * Map associating each prefixName with the full prefixIri.
+ */ + protected Map prefixes = new HashMap<>(); + + /** + * Iri holding the base namespace. + */ + protected String baseUri = PrefixDeclarationRegistry.EMPTY_BASE; + + @Override + public String getBaseIri() { + return baseUri; + } + + @Override + public String getPrefixIri(String prefixName) throws PrefixDeclarationException { + if (!prefixes.containsKey(prefixName)) { + throw new PrefixDeclarationException( + "Prefix \"" + prefixName + "\" cannot be resolved (not declared yet)."); + } + + return prefixes.get(prefixName); + } + + @Override + public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { + int colon = prefixedName.indexOf(":"); + String prefix = prefixedName.substring(0, colon + 1); + String suffix = prefixedName.substring(colon + 1); + + return getPrefixIri(prefix) + suffix; + } + + @Override + public String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarationException { + URI relative; + + try { + relative = new URI(potentiallyRelativeIri); + } catch (URISyntaxException e) { + throw new PrefixDeclarationException("Failed to parse IRI", e); + } + + if (relative.isAbsolute()) { + return potentiallyRelativeIri; + } else { + return getBaseIri() + potentiallyRelativeIri; + } + } + + @Override + public Iterator> iterator() { + return this.prefixes.entrySet().iterator(); + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java deleted file mode 100644 index 6365a77a9..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ /dev/null @@ -1,183 +0,0 @@ -package org.semanticweb.vlog4j.core.model.implementation; - -/*- - * #%L - * vlog4j-parser - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.net.URI; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; - -/** - * Implementation of {@link PrefixDeclarations} that is suitable for - * incrementally parsing from multiple sources. When trying to merge in - * conflicting prefix declarations, a fresh non-conflicting prefix is generated - * instead. 
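The abstract registry above centralises the lookup logic that both concrete registries share: prefixed names are split at the first colon and resolved against the stored prefix IRI, and relative IRIs are resolved against the base. A stand-alone sketch of just that logic, using a plain map and skipping the PrefixDeclarationException handling of the real class:

    import java.net.URI;
    import java.net.URISyntaxException;
    import java.util.HashMap;
    import java.util.Map;

    // Not the library class: a minimal illustration of the lookup logic only.
    public class PrefixResolutionSketch {
        private final Map<String, String> prefixes = new HashMap<>();
        private String baseIri = ""; // empty string encodes "no base declared"

        void setPrefix(String prefixName, String prefixIri) {
            prefixes.put(prefixName, prefixIri); // prefixName includes the colon, e.g. "eg:"
        }

        String resolvePrefixedName(String prefixedName) {
            int colon = prefixedName.indexOf(":");
            String prefix = prefixedName.substring(0, colon + 1); // keep the ":"
            String suffix = prefixedName.substring(colon + 1);
            return prefixes.get(prefix) + suffix;
        }

        String absolutizeIri(String iri) throws URISyntaxException {
            // relative IRIs are resolved against the base, absolute ones pass through
            return new URI(iri).isAbsolute() ? iri : baseIri + iri;
        }

        public static void main(String[] args) throws URISyntaxException {
            PrefixResolutionSketch registry = new PrefixResolutionSketch();
            registry.baseIri = "https://example.org/";
            registry.setPrefix("eg:", "https://example.org/vocab#");
            System.out.println(registry.resolvePrefixedName("eg:disease")); // https://example.org/vocab#disease
            System.out.println(registry.absolutizeIri("facts.csv"));        // https://example.org/facts.csv
            System.out.println(registry.absolutizeIri("https://example.com/x")); // already absolute, unchanged
        }
    }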
- * - * @author Maximilian Marx - */ -final public class MergeablePrefixDeclarations implements PrefixDeclarations { - private Map prefixes = new HashMap<>(); - - private String baseUri = EMPTY_BASE_PREFIX; - private long nextIndex = 0; - - private static final String EMPTY_BASE_PREFIX = ""; - private static final String GENERATED_PREFIX_PREFIX = "vlog4j_generated_"; - - public MergeablePrefixDeclarations() { - } - - public MergeablePrefixDeclarations(final PrefixDeclarations prefixDeclarations) { - super(); - mergePrefixDeclarations(prefixDeclarations); - } - - @Override - public String getBase() { - return baseUri; - } - - @Override - public void setBase(String base) { - if (base != this.baseUri && this.baseUri != EMPTY_BASE_PREFIX) { - prefixes.put(getFreshPrefix(), this.baseUri); - } - - this.baseUri = base; - } - - @Override - public String getPrefix(String prefix) throws PrefixDeclarationException { - if (!prefixes.containsKey(prefix)) { - throw new PrefixDeclarationException("Prefix \"" + prefix + "\" cannot be resolved (not declared yet)."); - } - return prefixes.get(prefix); - } - - @Override - public void setPrefix(String prefix, String iri) { - String prefixName = prefixes.containsKey(prefix) ? getFreshPrefix() : prefix; - prefixes.put(prefixName, iri); - } - - @Override - public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { - int colon = prefixedName.indexOf(":"); - String prefix = prefixedName.substring(0, colon + 1); - String suffix = prefixedName.substring(colon + 1); - - return getPrefix(prefix) + suffix; - } - - /** - * Turn an absolute Iri into a (possibly) prefixed name. Dual to - * {@link resolvePrefixedName}. - * - * @param iri an absolute Iri to abbreviate. - * - * @return an abbreviated form of {@code iri} if an appropriate prefix is known, - * or {@code iri}. - */ - public String unresolveAbsoluteIri(String iri) { - Map matches = new HashMap<>(); - - if (baseUri != EMPTY_BASE_PREFIX && iri.startsWith(baseUri) && !iri.equals(baseUri)) { - matches.put(iri.replaceFirst(baseUri, ""), baseUri.length()); - } - - prefixes.forEach((prefixName, prefixIri) -> { - // only select proper prefixes here, since `eg:` is not a valid prefixed name. - if (iri.startsWith(prefixIri) && !iri.equals(prefixIri)) { - matches.put(iri.replaceFirst(prefixIri, prefixName), prefixIri.length()); - } - }); - - List matchesByLength = new ArrayList<>(matches.keySet()); - // reverse order, so we get the longest match first - matchesByLength.sort(Comparator.comparing(matches::get).reversed()); - - if (matchesByLength.size() > 0) { - return matchesByLength.get(0); - } else { - // no matching prefix - return iri; - } - } - - @Override - public String absolutize(String iri) { - URI relative = URI.create(iri); - - if (relative.isAbsolute()) { - return iri; - } else { - return getBase() + iri; - } - } - - @Override - public Iterator iterator() { - return this.prefixes.keySet().iterator(); - } - - /** - * Merge another set of prefix declarations. - * - * @param other the set of prefix declarations to merge. Conflicting prefixes - * will be renamed. - * - * @return this - */ - public MergeablePrefixDeclarations mergePrefixDeclarations(final PrefixDeclarations other) { - this.setBase(other.getBase()); - - for (String prefixName : other) { - String iri; - try { - iri = other.getPrefix(prefixName); - } catch (PrefixDeclarationException e) { - // this shouldn't throw, since we already know that prefix is defined. 
- throw new RuntimeException(e); - } - - this.prefixes.put(prefixName, iri); - } - - return this; - } - - private String getFreshPrefix() { - for (long idx = nextIndex; true; ++idx) { - String freshPrefix = GENERATED_PREFIX_PREFIX + idx + ":"; - - if (!prefixes.containsKey(freshPrefix)) { - this.nextIndex = idx + 1; - return freshPrefix; - } - } - } -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java new file mode 100644 index 000000000..ac832aae5 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -0,0 +1,151 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +/*- + * #%L + * vlog4j-syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; + +/** + * Implementation of {@link PrefixDeclarationRegistry} that is suitable for + * incrementally parsing from multiple sources. When trying to merge in + * conflicting prefix declarations, a fresh non-conflicting prefix is generated + * instead. + * + * @author Maximilian Marx + */ +final public class MergingPrefixDeclarationRegistry extends AbstractPrefixDeclarationRegistry { + /** + * Next index to use for generated prefix names. + */ + private long nextIndex = 0; + + /** + * Prefix string to use for generated prefix name + */ + private static final String GENERATED_PREFIX_PREFIX_STRING = "vlog4j_generated_"; + + public MergingPrefixDeclarationRegistry() { + super(); + } + + public MergingPrefixDeclarationRegistry(final PrefixDeclarationRegistry prefixDeclarations) { + super(); + mergePrefixDeclarations(prefixDeclarations); + } + + /** + * Sets the base namespace to the given value. If a base Iri has already been + * set, it will be added as a prefix declaration with a fresh prefixName. + * + * @param baseIri the new base namespace. + */ + @Override + public void setBaseIri(String baseIri) { + if (baseIri != this.baseUri && this.baseUri != PrefixDeclarationRegistry.EMPTY_BASE) { + prefixes.put(getFreshPrefix(), this.baseUri); + } + + this.baseUri = baseIri; + } + + /** + * Registers a prefix declaration. If prefixName is already registered, a + * freshly generated name will be used instead. + * + * @param prefixName the name of the prefix. + * @param prefixIri the IRI of the prefix. + */ + @Override + public void setPrefixIri(String prefixName, String prefixIri) { + String name = prefixes.containsKey(prefixName) ? getFreshPrefix() : prefixName; + prefixes.put(name, prefixIri); + } + + /** + * Turn an absolute Iri into a (possibly) prefixed name. 
Dual to + * {@link AbstractPrefixDeclarationRegistry#resolvePrefixedName}. + * + * @param iri an absolute Iri to abbreviate. + * + * @return an abbreviated form of {@code iri} if an appropriate prefix is known, + * or {@code iri}. + */ + public String unresolveAbsoluteIri(String iri) { + Map matches = new HashMap<>(); + + if (baseUri != PrefixDeclarationRegistry.EMPTY_BASE && iri.startsWith(baseUri) && !iri.equals(baseUri)) { + matches.put(iri.replaceFirst(baseUri, PrefixDeclarationRegistry.EMPTY_BASE), baseUri.length()); + } + + prefixes.forEach((prefixName, prefixIri) -> { + // only select proper prefixes here, since `eg:` is not a valid prefixed name. + if (iri.startsWith(prefixIri) && !iri.equals(prefixIri)) { + matches.put(iri.replaceFirst(prefixIri, prefixName), prefixIri.length()); + } + }); + + List matchesByLength = new ArrayList<>(matches.keySet()); + // reverse order, so we get the longest match first + matchesByLength.sort(Comparator.comparing(matches::get).reversed()); + + if (matchesByLength.size() > 0) { + return matchesByLength.get(0); + } else { + // no matching prefix + return iri; + } + } + + /** + * Merge another set of prefix declarations. + * + * @param other the set of prefix declarations to merge. Conflicting prefixes + * from {@code other} will be renamed. + * + * @return this + */ + public MergingPrefixDeclarationRegistry mergePrefixDeclarations(final PrefixDeclarationRegistry other) { + this.setBaseIri(other.getBaseIri()); + + for (Entry prefix : other) { + setPrefixIri(prefix.getKey(), prefix.getValue()); + } + + return this; + } + + private String getFreshPrefix() { + for (long idx = nextIndex; true; ++idx) { + String freshPrefix = GENERATED_PREFIX_PREFIX_STRING + idx + PrefixDeclarationRegistry.PREFIX_NAME_SEPARATOR; + + if (!prefixes.containsKey(freshPrefix)) { + this.nextIndex = idx + 1; + return freshPrefix; + } + } + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 8d0325fd1..ea03e05c3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,6 +1,7 @@ package org.semanticweb.vlog4j.core.model.implementation; import java.util.List; +import java.util.Map.Entry; import java.util.function.Function; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; @@ -36,7 +37,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; @@ -157,7 +158,7 @@ public static String getFactString(final Fact fact) { * Creates a String representation of a given {@link Constant}. * * @see Rule syntax . - * @param constant a {@link Constant} + * @param constant a {@link Constant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given {@link Constant}. */ @@ -195,16 +196,16 @@ public static String getConstantName(final LanguageStringConstant languageString * * @see Rule syntax . 
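The unresolveAbsoluteIri method of the new MergingPrefixDeclarationRegistry above collects one candidate abbreviation per matching prefix, keyed by the length of the prefix IRI it replaces, and keeps the longest match, using the Comparator-based sort from patch 0560. A stand-alone sketch of that selection with invented prefixes:

    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.Map;

    public class LongestMatchSketch {
        public static void main(String[] args) {
            Map<String, String> prefixes = new HashMap<>();
            prefixes.put("eg:", "https://example.org/");
            prefixes.put("ex:", "https://example.org/example/");

            String iri = "https://example.org/example/test";

            // One candidate abbreviation per matching prefix, mapped to the length
            // of the prefix IRI that it replaced.
            Map<String, Integer> matches = new HashMap<>();
            prefixes.forEach((name, prefixIri) -> {
                if (iri.startsWith(prefixIri) && !iri.equals(prefixIri)) {
                    matches.put(iri.replaceFirst(prefixIri, name), prefixIri.length());
                }
            });

            // Comparator.comparing(matches::get).reversed() puts the candidate that
            // replaced the longest prefix IRI first; fall back to the IRI itself.
            String best = matches.keySet().stream()
                    .sorted(Comparator.comparing(matches::get).reversed())
                    .findFirst()
                    .orElse(iri);
            System.out.println(best); // ex:test
        }
    }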
* @param datatypeConstant a {@link DatatypeConstant} - * @param iriTransformer a function to transform IRIs with. + * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given * {@link DatatypeConstant}. */ public static String getString(final DatatypeConstant datatypeConstant, Function iriTransformer) { - if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { + if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { return getString(datatypeConstant.getLexicalValue()); - } else if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { + } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { return datatypeConstant.getLexicalValue(); } else { return getConstantName(datatypeConstant, iriTransformer); @@ -233,9 +234,10 @@ public static String getString(final DatatypeConstant datatypeConstant) { * @return String representation corresponding to a given * {@link DatatypeConstant}. */ - public static String getConstantName(final DatatypeConstant datatypeConstant, Function iriTransformer) { + public static String getConstantName(final DatatypeConstant datatypeConstant, + Function iriTransformer) { return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + getIRIString(datatypeConstant.getDatatype(), iriTransformer); + + getIRIString(datatypeConstant.getDatatype(), iriTransformer); } /** @@ -470,26 +472,18 @@ public static String getBaseString(KnowledgeBase knowledgeBase) { return ""; } - return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + "\n"; + return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; } - public static String getPrefixString(String prefixName, String prefixIri) { - return PREFIX + prefixName + " " + addAngleBrackets(prefixIri) + STATEMENT_SEPARATOR + "\n"; + public static String getPrefixString(Entry prefix) { + return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; } public static String getBaseAndPrefixDeclarations(KnowledgeBase knowledgeBase) { StringBuilder sb = new StringBuilder(); sb.append(getBaseString(knowledgeBase)); - - knowledgeBase.getPrefixes().forEachRemaining((String prefixName) -> { - try { - sb.append(getPrefixString(prefixName, knowledgeBase.getPrefix(prefixName))); - } catch (PrefixDeclarationException e) { - // this shouldn't throw, since we're iterating over known prefixes. 
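For orientation, the datatype dispatch in Serializer.getString above writes xsd:string constants as plain quoted literals, keeps the bare lexical value for xsd:integer, xsd:decimal and xsd:double, and otherwise falls back to the "lexical"^^<datatype> form. A simplified stand-in for that dispatch (quote escaping and the IRI transformer are omitted):

    public class DatatypeSerializationSketch {
        static final String XSD = "http://www.w3.org/2001/XMLSchema#";

        // Simplified stand-in for the dispatch shown above, not the library method.
        static String serialize(String lexicalValue, String datatype) {
            if (datatype.equals(XSD + "string")) {
                return "\"" + lexicalValue + "\"";
            } else if (datatype.equals(XSD + "integer") || datatype.equals(XSD + "decimal")
                    || datatype.equals(XSD + "double")) {
                return lexicalValue;
            } else {
                return "\"" + lexicalValue + "\"^^<" + datatype + ">";
            }
        }

        public static void main(String[] args) {
            System.out.println(serialize("c", XSD + "string"));        // "c"
            System.out.println(serialize("42", XSD + "integer"));      // 42
            System.out.println(serialize("2020-02-28", XSD + "date")); // "2020-02-28"^^<...#date>
        }
    }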
- throw new RuntimeException(e); - } - }); + knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); return sb.toString(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 92d3b779c..da8211d95 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -14,18 +14,20 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.Map.Entry; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; +import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; /*- * #%L @@ -174,7 +176,7 @@ public Void visit(final DataSourceDeclaration statement) { * base. We try to preserve user-provided prefixes found in files when loading * data. */ - private MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(); + private MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); /** * Index structure that organises all facts by their predicate. @@ -450,12 +452,12 @@ Map> getFactsByPredicate() { * This is essentially * {@link org.semanticweb.vlog4j.parser.RuleParser#parseInto}, but we need to * avoid a circular dependency here -- this is also why we throw - * {@link Exception} instead of + * {@link VLog4jException} instead of * {@link org.semanticweb.vlog4j.parser.ParsingException}. */ @FunctionalInterface public interface AdditionalInputParser { - KnowledgeBase parseInto(InputStream stream, KnowledgeBase kb) throws IOException, Exception; + KnowledgeBase parseInto(InputStream stream, KnowledgeBase kb) throws IOException, VLog4jException; } /** @@ -468,12 +470,12 @@ public interface AdditionalInputParser { * @throws IOException when reading {@code file} fails * @throws IllegalArgumentException when {@code file} is null or has already * been imported - * @throws RuntimeException when parseFunction throws + * @throws VLog4jException when parseFunction throws VLog4jException * * @return this */ public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunction) - throws Exception, IOException, IllegalArgumentException { + throws VLog4jException, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); @@ -485,7 +487,7 @@ public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunct } /** - * Merge {@link PrefixDeclarations} into this knowledge base. + * Merge {@link PrefixDeclarationRegistry} into this knowledge base. 
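The importRulesFile method above takes a file together with an AdditionalInputParser callback and rejects files whose canonical path was already imported. A hypothetical caller could look as follows; the file name is invented, and a plain RuleParser.parseInto(kb, stream) overload is assumed here, while the import directive handler later in this patch passes a parser configuration and base IRI instead:

    import java.io.File;
    import java.io.IOException;

    import org.semanticweb.vlog4j.core.exceptions.VLog4jException;
    import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase;
    import org.semanticweb.vlog4j.parser.RuleParser;

    public class ImportRulesFileSketch {
        public static void main(String[] args) throws VLog4jException, IOException {
            KnowledgeBase knowledgeBase = new KnowledgeBase();
            File rulesFile = new File("rules.rls"); // invented path

            // The callback receives the opened stream and the knowledge base;
            // importing the same canonical path twice raises IllegalArgumentException.
            knowledgeBase.importRulesFile(rulesFile, (stream, kb) -> {
                RuleParser.parseInto(kb, stream); // assumed overload, see lead-in
                return kb;
            });
        }
    }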
* * @param prefixDeclarations the prefix declarations to merge. Conflicting * prefix names in {@code prefixDeclarations} will be @@ -493,7 +495,7 @@ public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunct * * @return this */ - public KnowledgeBase mergePrefixDeclarations(PrefixDeclarations prefixDeclarations) { + public KnowledgeBase mergePrefixDeclarations(PrefixDeclarationRegistry prefixDeclarations) { this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); return this; @@ -505,7 +507,7 @@ public KnowledgeBase mergePrefixDeclarations(PrefixDeclarations prefixDeclaratio * @return the base IRI, if declared, or {@code ""} otherwise. */ public String getBase() { - return this.prefixDeclarations.getBase(); + return this.prefixDeclarations.getBaseIri(); } /* @@ -513,7 +515,7 @@ public String getBase() { * * @return an iterator over all known prefixes. */ - public Iterator getPrefixes() { + public Iterator> getPrefixes() { return this.prefixDeclarations.iterator(); } @@ -528,7 +530,7 @@ public Iterator getPrefixes() { * @return the declared IRI for {@code prefixName}. */ public String getPrefix(String prefixName) throws PrefixDeclarationException { - return this.prefixDeclarations.getPrefix(prefixName); + return this.prefixDeclarations.getPrefixIri(prefixName); } /* diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java similarity index 54% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java index f515cd6db..de313a6a1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java @@ -20,19 +20,15 @@ * #L% */ -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; - -import java.util.Arrays; +import static org.junit.Assert.assertEquals; import org.junit.Before; import org.junit.Test; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; -import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; +import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; -public class MergeablePrefixDeclarationsTest { - private MergeablePrefixDeclarations prefixDeclarations; +public class MergingPrefixDeclarationRegistryTest { + private MergingPrefixDeclarationRegistry prefixDeclarations; private static final String BASE = "https://example.org/"; private static final String UNRELATED = "https://example.com/"; @@ -42,39 +38,39 @@ public class MergeablePrefixDeclarationsTest { @Before public void init() { - prefixDeclarations = new MergeablePrefixDeclarations(); + prefixDeclarations = new MergingPrefixDeclarationRegistry(); } @Test - public void setBase_changingBase_succeeds() { - prefixDeclarations.setBase(BASE); - assertEquals(BASE, prefixDeclarations.getBase()); - prefixDeclarations.setBase(MORE_SPECIFIC); - assertEquals(MORE_SPECIFIC, prefixDeclarations.getBase()); + public void setBaseIri_changingBase_succeeds() { + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE, prefixDeclarations.getBaseIri()); + 
prefixDeclarations.setBaseIri(MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getBaseIri()); } @Test - public void setBase_redeclareSameBase_succeeds() { - prefixDeclarations.setBase(BASE); - assertEquals(BASE, prefixDeclarations.getBase()); - prefixDeclarations.setBase(BASE); - assertEquals(BASE, prefixDeclarations.getBase()); + public void setBaseIri_redeclareSameBase_succeeds() { + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE, prefixDeclarations.getBaseIri()); + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE, prefixDeclarations.getBaseIri()); } @Test - public void absolutize_noBase_identical() { - assertEquals(RELATIVE, prefixDeclarations.absolutize(RELATIVE)); + public void absolutizeIri_noBase_identical() throws PrefixDeclarationException { + assertEquals(RELATIVE, prefixDeclarations.absolutizeIri(RELATIVE)); } @Test - public void absolutize_base_absoluteIri() { - prefixDeclarations.setBase(BASE); - assertEquals(BASE + RELATIVE, prefixDeclarations.absolutize(RELATIVE)); + public void absolutizeIri_base_absoluteIri() throws PrefixDeclarationException { + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE + RELATIVE, prefixDeclarations.absolutizeIri(RELATIVE)); } @Test - public void absolutize_absoluteIri_identical() { - assertEquals(BASE, prefixDeclarations.absolutize(BASE)); + public void absolutizeIri_absoluteIri_identical() throws PrefixDeclarationException { + assertEquals(BASE, prefixDeclarations.absolutizeIri(BASE)); } @Test(expected = PrefixDeclarationException.class) @@ -84,51 +80,42 @@ public void resolvePrefixedName_undeclaredPrefix_throws() throws PrefixDeclarati @Test public void resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationException { - prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", BASE); assertEquals(BASE + RELATIVE, prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE)); } @Test public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws PrefixDeclarationException { String prefix = "eg:"; - prefixDeclarations.setPrefix(prefix, BASE); + prefixDeclarations.setPrefixIri(prefix, BASE); String resolved = BASE + RELATIVE; String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved); assertEquals(resolved, prefixDeclarations.resolvePrefixedName(unresolved)); } @Test - public void setPrefix_redeclarePrefix_succeeds() throws PrefixDeclarationException { - prefixDeclarations.setPrefix("eg:", BASE); - prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); - assertEquals(BASE, prefixDeclarations.getPrefix("eg:")); + public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); + assertEquals(BASE, prefixDeclarations.getPrefixIri("eg:")); } @Test public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDeclarationException { String prefix = "vlog4j_generated_"; - prefixDeclarations.setPrefix(prefix + "0:", BASE + "generated/"); - prefixDeclarations.setPrefix("eg:", BASE); - prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); + prefixDeclarations.setPrefixIri(prefix + "0:", BASE + "generated/"); + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); - assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefix(prefix + "1:")); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefixIri(prefix + "1:")); } @Test - public void 
mergeablePrefixDeclarations_constructor_succeeds() throws PrefixDeclarationException { - this.prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); - MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(this.prefixDeclarations); - assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefix("eg:")); - } - - @Test(expected = RuntimeException.class) - public void mergePrefixDeclarations_getPrefixUnexpectedlyThrows_throws() throws PrefixDeclarationException { - PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); - - when(prefixDeclarations.iterator()).thenReturn(Arrays.asList("eg:", "ex:").iterator()); - when(prefixDeclarations.getPrefix(anyString())).thenThrow(PrefixDeclarationException.class); - - this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + public void mergingPrefixDeclarationRegistry_constructor_succeeds() throws PrefixDeclarationException { + this.prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); + MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry( + this.prefixDeclarations); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefixIri("eg:")); } @Test @@ -139,49 +126,49 @@ public void unresolveAbsoluteIri_default_identical() { @Test public void unresolveAbsoluteIri_declaredPrefix_succeeds() { assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); - prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", BASE); assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_unrelatedPrefix_identical() { - prefixDeclarations.setPrefix("eg:", UNRELATED); + prefixDeclarations.setPrefixIri("eg:", UNRELATED); assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_unrelatedAndRelatedPrefixes_succeeds() { - prefixDeclarations.setPrefix("ex:", UNRELATED); - prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefixIri("ex:", UNRELATED); + prefixDeclarations.setPrefixIri("eg:", BASE); assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_multipleMatchingPrefixes_longestMatchWins() { - prefixDeclarations.setPrefix("eg:", BASE); - prefixDeclarations.setPrefix("ex:", MORE_SPECIFIC); + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("ex:", MORE_SPECIFIC); assertEquals("ex:" + RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); - prefixDeclarations.setPrefix("er:", EVEN_MORE_SPECIFIC); + prefixDeclarations.setPrefixIri("er:", EVEN_MORE_SPECIFIC); assertEquals("er:test", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); } @Test public void unresolveAbsoluteIri_exactPrefixMatch_identical() { - prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", BASE); assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); } @Test public void unresolveAbsoluteIri_baseIsMoreSpecific_baseWins() { - prefixDeclarations.setBase(MORE_SPECIFIC); - prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setBaseIri(MORE_SPECIFIC); + prefixDeclarations.setPrefixIri("eg:", BASE); assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); } @Test public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws PrefixDeclarationException { String prefix = "eg:"; - 
prefixDeclarations.setPrefix(prefix, BASE); + prefixDeclarations.setPrefixIri(prefix, BASE); String unresolved = prefix + RELATIVE; String resolved = prefixDeclarations.resolvePrefixedName(unresolved); assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved)); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index aed7efd89..5093d5116 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -26,7 +26,7 @@ import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.TermType; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; @@ -97,7 +97,7 @@ public void abstractConstantGetterTest() { @Test public void datatypeConstantGetterTest() { - DatatypeConstant c = new DatatypeConstantImpl("c", PrefixDeclarations.XSD_STRING); + DatatypeConstant c = new DatatypeConstantImpl("c", PrefixDeclarationRegistry.XSD_STRING); assertEquals("c", c.getLexicalValue()); assertEquals("http://www.w3.org/2001/XMLSchema#string", c.getDatatype()); assertEquals("\"c\"^^", c.getName()); @@ -142,7 +142,7 @@ public void abstractConstantToStringTest() { @Test public void datatypeConstantToStringTest() { - DatatypeConstantImpl c = new DatatypeConstantImpl("c", PrefixDeclarations.XSD_STRING); + DatatypeConstantImpl c = new DatatypeConstantImpl("c", PrefixDeclarationRegistry.XSD_STRING); assertEquals("\"c\"", c.toString()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java index 29663ac17..ce862f09b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java @@ -30,7 +30,7 @@ import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; +import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; public class KnowledgeBaseTest { @@ -94,7 +94,7 @@ public void testDoRemoveStatementInexistentPredicate() { @Test public void getBase_default_hasEmptyBase() { - assertEquals(this.kb.getBase(), ""); + assertEquals("", this.kb.getBase()); } @Test(expected = PrefixDeclarationException.class) @@ -110,8 +110,8 @@ public void resolvePrefixedName_defaultUndeclaredPrefix_throws() throws PrefixDe @Test public void mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationException { String iri = "https://example.org/"; - MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(); - prefixDeclarations.setPrefix("ex:", iri); + MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setPrefixIri("ex:", iri); 
this.kb.mergePrefixDeclarations(prefixDeclarations); assertEquals(this.kb.getPrefix("ex:"), iri); assertEquals(this.kb.resolvePrefixedName("ex:test"), iri + "test"); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java index 036335328..6dff4c70f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -6,12 +6,17 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Arrays; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Map.Entry; import java.util.stream.Collectors; import java.util.stream.Stream; import org.junit.Before; import org.junit.Test; +import org.mockito.internal.util.collections.Sets; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; import org.semanticweb.vlog4j.core.model.api.Conjunction; @@ -19,7 +24,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; @@ -97,10 +102,12 @@ public void writeInferences_example_succeeds() throws IOException { @Test public void writeInferences_withPrefixDeclarations_abbreviatesIris() throws IOException, PrefixDeclarationException { - PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); - when(prefixDeclarations.getBase()).thenReturn(""); - when(prefixDeclarations.getPrefix(eq("eg:"))).thenReturn("http://example.org/"); - when(prefixDeclarations.iterator()).thenReturn(Arrays.asList("eg:").iterator()); + PrefixDeclarationRegistry prefixDeclarations = mock(PrefixDeclarationRegistry.class); + Map prefixMap = new HashMap<>(); + prefixMap.put("eg:", "http://example.org/"); + when(prefixDeclarations.getBaseIri()).thenReturn(""); + when(prefixDeclarations.getPrefixIri(eq("eg:"))).thenReturn("http://example.org/"); + when(prefixDeclarations.iterator()).thenReturn(prefixMap.entrySet().iterator()); kb.mergePrefixDeclarations(prefixDeclarations); assertEquals(11, getInferences().size()); @@ -109,9 +116,9 @@ public void writeInferences_withPrefixDeclarations_abbreviatesIris() @Test public void writeInferences_withBase_writesBase() throws IOException, PrefixDeclarationException { - PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); - when(prefixDeclarations.getBase()).thenReturn("http://example.org/"); - when(prefixDeclarations.iterator()).thenReturn(Arrays.asList().iterator()); + PrefixDeclarationRegistry prefixDeclarations = mock(PrefixDeclarationRegistry.class); + when(prefixDeclarations.getBaseIri()).thenReturn("http://example.org/"); + when(prefixDeclarations.iterator()).thenReturn(new HashMap().entrySet().iterator()); kb.mergePrefixDeclarations(prefixDeclarations); assertEquals(11, 
getInferences().size()); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index b4c9cb4b3..69d772f70 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -28,7 +28,7 @@ import java.util.List; import java.util.NoSuchElementException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; @@ -207,15 +207,15 @@ default ParserConfiguration getParserConfiguration(SubParserFactory subParserFac } /** - * Obtain {@link PrefixDeclarations} from a {@link SubParserFactory}. + * Obtain {@link PrefixDeclarationRegistry} from a {@link SubParserFactory}. * * @param subParserFactory the SubParserFactory. * * @return the prefix declarations. */ - default PrefixDeclarations getPrefixDeclarations(SubParserFactory subParserFactory) { + default PrefixDeclarationRegistry getPrefixDeclarationRegistry(SubParserFactory subParserFactory) { JavaCCParser subParser = subParserFactory.makeSubParser(""); - return subParser.getPrefixDeclarations(); + return subParser.getPrefixDeclarationRegistry(); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java new file mode 100644 index 000000000..42e014cae --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java @@ -0,0 +1,94 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.vlog4j.core.model.implementation.AbstractPrefixDeclarationRegistry; + +/** + * Implementation of {@link PrefixDeclarationRegistry} that is used when parsing + * data from a single source. In this case, attempts to re-declare prefixes or + * the base IRI will lead to errors. + * + * @author Markus Kroetzsch + * + */ +final public class LocalPrefixDeclarationRegistry extends AbstractPrefixDeclarationRegistry { + + /** + * Fallback IRI to use as base IRI if none is set. 
+ */ + private String fallbackIri; + + public LocalPrefixDeclarationRegistry() { + this(PrefixDeclarationRegistry.EMPTY_BASE); // empty string encodes: "no base" (use relative IRIs) + } + + /** + * + */ + public LocalPrefixDeclarationRegistry(String fallbackIri) { + super(); + this.fallbackIri = fallbackIri; + this.baseUri = null; + } + + /** + * Returns the relevant base namespace. Returns the fallback IRI if no base + * namespace has been set yet. + * + * @return string of an absolute base IRI + */ + @Override + public String getBaseIri() { + if (this.baseUri == null) { + this.baseUri = this.fallbackIri; + } + return baseUri.toString(); + } + + @Override + public void setPrefixIri(String prefixName, String prefixIri) throws PrefixDeclarationException { + if (prefixes.containsKey(prefixName)) { + throw new PrefixDeclarationException("Prefix \"" + prefixName + "\" is already defined as <" + + prefixes.get(prefixName) + ">. It cannot be redefined to mean <" + prefixIri + ">."); + } + + prefixes.put(prefixName, prefixIri); + } + + /** + * Sets the base namespace to the given value. This should only be done once, + * and not after the base namespace was assumed to be an implicit default value. + * + * @param baseIri the new base namespace + * @throws PrefixDeclarationException if base was already defined + */ + + @Override + public void setBaseIri(String baseUri) throws PrefixDeclarationException { + if (this.baseUri != null) + throw new PrefixDeclarationException( + "Base is already defined as <" + this.baseUri + "> and cannot be re-defined as " + baseUri); + this.baseUri = baseUri; + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java deleted file mode 100644 index 8bc8a93b6..000000000 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java +++ /dev/null @@ -1,118 +0,0 @@ -package org.semanticweb.vlog4j.parser; - -/*- - * #%L - * vlog4j-parser - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; - -/** - * Implementation of {@link PrefixDeclarations} that is used when parsing data - * from a single source. In this case, attempts to re-declare prefixes or the - * base IRI will lead to errors. 
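With this refactoring the two registry flavours have clearly separated roles: the parser-local LocalPrefixDeclarationRegistry treats redeclarations as errors, while the MergingPrefixDeclarationRegistry used by the knowledge base renames conflicting declarations instead. A small usage sketch of that contrast, with invented IRIs (the generated prefix name is an implementation detail, so it is only printed):

    import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException;
    import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry;
    import org.semanticweb.vlog4j.parser.LocalPrefixDeclarationRegistry;

    public class RegistryFlavoursSketch {
        public static void main(String[] args) throws PrefixDeclarationException {
            // Single-source parsing: redeclaring a prefix is an error.
            LocalPrefixDeclarationRegistry local = new LocalPrefixDeclarationRegistry();
            local.setPrefixIri("eg:", "https://example.org/");
            try {
                local.setPrefixIri("eg:", "https://example.com/");
            } catch (PrefixDeclarationException e) {
                System.out.println("local registry rejects redeclaration: " + e.getMessage());
            }

            // Merging registry: the second declaration is kept under a freshly
            // generated prefix name instead of failing.
            MergingPrefixDeclarationRegistry merging = new MergingPrefixDeclarationRegistry();
            merging.setPrefixIri("eg:", "https://example.org/");
            merging.setPrefixIri("eg:", "https://example.com/");
            merging.forEach(entry -> System.out.println(entry.getKey() + " -> " + entry.getValue()));
        }
    }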
- * - * @author Markus Kroetzsch - * - */ -final public class LocalPrefixDeclarations implements PrefixDeclarations { - - Map prefixes = new HashMap<>(); - String baseUri; - String fallbackUri; - - public LocalPrefixDeclarations() { - this(""); // empty string encodes: "no base" (use relative IRIs) - } - - public LocalPrefixDeclarations(String fallbackUri) { - this.fallbackUri = fallbackUri; - } - - public String getBase() { - if (this.baseUri == null) { - this.baseUri = this.fallbackUri; - } - return baseUri.toString(); - } - - public String getPrefix(String prefix) throws PrefixDeclarationException { - if (!prefixes.containsKey(prefix)) { - throw new PrefixDeclarationException("Prefix " + prefix + " cannot be resolved (not declared yet)."); - } - return prefixes.get(prefix).toString(); - } - - public void setPrefix(String prefix, String uri) throws PrefixDeclarationException { - if (prefixes.containsKey(prefix)) { - throw new PrefixDeclarationException("Prefix " + prefix + " is already defined as <" + prefixes.get(prefix) - + ">. It cannot be redefined to mean <" + uri + ">."); - } - - prefixes.put(prefix, uri); - } - - public void setBase(String baseUri) throws PrefixDeclarationException { - if (this.baseUri != null) - throw new PrefixDeclarationException( - "Base is already defined as <" + this.baseUri + "> and cannot be re-defined as " + baseUri); - this.baseUri = baseUri; - } - - public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { - // from the parser we know that prefixedName is of the form: - // prefix:something - // remember that the prefixes are stored with the colon symbol - // This does not return the surrounding angle brackets <> - - int idx = prefixedName.indexOf(":") + 1; - String prefix = prefixedName.substring(0, idx); - String suffix = prefixedName.substring(idx); - - return getPrefix(prefix) + suffix; - } - - public String absolutize(String iri) throws PrefixDeclarationException { - URI relative; - - try { - relative = new URI(iri); - } catch (URISyntaxException e) { - throw new PrefixDeclarationException("Failed to parse IRI", e); - } - - if (relative.isAbsolute()) { - return iri; - } else { - return getBase() + iri; - } - } - - @Override - public Iterator iterator() { - return this.prefixes.keySet().iterator(); - } - -} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 4015cb83b..861781378 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -28,7 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -134,7 +134,7 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final Strin * @return the {@link Constant} corresponding to the given arguments. 
*/ public Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { - final String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); + final String type = ((datatype != null) ? datatype : PrefixDeclarationRegistry.XSD_STRING); final DatatypeConstantHandler handler = this.datatypes.get(type); if (handler != null) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java index 9261af3f9..73625b184 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,17 +20,11 @@ * #L% */ -public class ParsingException extends Exception { +import org.semanticweb.vlog4j.core.exceptions.VLog4jException; - /** - * - */ +public class ParsingException extends VLog4jException { private static final long serialVersionUID = 2849123381757026724L; - public ParsingException() { - super(); - } - public ParsingException(String message) { super(message); } @@ -46,5 +40,4 @@ public ParsingException(String message, Throwable cause) { public ParsingException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } - } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index e7784822d..2fab78fd9 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -30,7 +30,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -61,8 +61,8 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea final JavaCCParser parser = new JavaCCParser(stream, encoding); if (baseIri != null) { - PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(baseIri); - parser.setPrefixDeclarations(prefixDeclarations); + PrefixDeclarationRegistry prefixDeclarationRegistry = new LocalPrefixDeclarationRegistry(baseIri); + parser.setPrefixDeclarationRegistry(prefixDeclarationRegistry); } parser.setKnowledgeBase(knowledgeBase); @@ -261,7 +261,7 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException } KnowledgeBase knowledgeBase = parser.getKnowledgeBase(); - return knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarations()); + return 
knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarationRegistry()); } protected static DataSourceDeclaration parseAndExtractDatasourceDeclaration(final JavaCCParser parser) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java index 680c498e6..2ac3216d1 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -24,7 +24,7 @@ import java.io.InputStream; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.DirectiveHandler; @@ -40,17 +40,17 @@ */ public class ImportFileRelativeDirectiveHandler implements DirectiveHandler { @Override - public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) + public KnowledgeBase handleDirective(List arguments, SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); - PrefixDeclarations prefixDeclarations = getPrefixDeclarations(subParserFactory); + PrefixDeclarationRegistry prefixDeclarationRegistry = getPrefixDeclarationRegistry(subParserFactory); File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); try { return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { - RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); + RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarationRegistry.getBaseIri()); return kb; }); } catch (Exception e) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 2043e9c7d..1440a9fd2 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -60,7 +60,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; @@ -318,9 +318,9 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { Constant NumericLiteral() : { Token t; } { - t = < INTEGER > { return createConstant(t.image, PrefixDeclarations.XSD_INTEGER); } - | t = < DECIMAL > { return createConstant(t.image, PrefixDeclarations.XSD_DECIMAL); } - | t = < DOUBLE > { return createConstant(t.image, PrefixDeclarations.XSD_DOUBLE); } + t = < 
INTEGER > { return createConstant(t.image, PrefixDeclarationRegistry.XSD_INTEGER); } + | t = < DECIMAL > { return createConstant(t.image, PrefixDeclarationRegistry.XSD_DECIMAL); } + | t = < DOUBLE > { return createConstant(t.image, PrefixDeclarationRegistry.XSD_DOUBLE); } } Constant RDFLiteral() throws PrefixDeclarationException : { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 02e9e35f6..ac977898e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -33,7 +33,7 @@ import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; @@ -42,7 +42,7 @@ import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.DirectiveArgument; -import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; +import org.semanticweb.vlog4j.parser.LocalPrefixDeclarationRegistry; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; @@ -62,7 +62,7 @@ * */ public class JavaCCParserBase { - private PrefixDeclarations prefixDeclarations; + private PrefixDeclarationRegistry prefixDeclarationRegistry; private KnowledgeBase knowledgeBase; private ParserConfiguration parserConfiguration; @@ -135,14 +135,14 @@ public enum ConfigurableLiteralDelimiter { public JavaCCParserBase() { this.knowledgeBase = new KnowledgeBase(); - this.prefixDeclarations = new LocalPrefixDeclarations(); + this.prefixDeclarationRegistry = new LocalPrefixDeclarationRegistry(); this.parserConfiguration = new DefaultParserConfiguration(); } AbstractConstant createConstant(String lexicalForm) throws ParseException { String absoluteIri; try { - absoluteIri = prefixDeclarations.absolutize(lexicalForm); + absoluteIri = absolutizeIri(lexicalForm); } catch (PrefixDeclarationException e) { throw makeParseExceptionWithCause("Failed to parse IRI", e); } @@ -334,12 +334,12 @@ void setNamedNullNamespace(byte[] namedNullNamespace) { this.namedNullNamespace = namedNullNamespace; } - public void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { - this.prefixDeclarations = prefixDeclarations; + public void setPrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) { + this.prefixDeclarationRegistry = prefixDeclarationRegistry; } - public PrefixDeclarations getPrefixDeclarations() { - return prefixDeclarations; + public PrefixDeclarationRegistry getPrefixDeclarationRegistry() { + return this.prefixDeclarationRegistry; } DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String syntacticForm, @@ -376,18 +376,18 @@ boolean isParsingOfNamedNullsAllowed() { } void setBase(String baseIri) throws PrefixDeclarationException { - prefixDeclarations.setBase(baseIri); + prefixDeclarationRegistry.setBaseIri(baseIri); } void setPrefix(String prefixName, String 
baseIri) throws PrefixDeclarationException { - prefixDeclarations.setPrefix(prefixName, baseIri); + prefixDeclarationRegistry.setPrefixIri(prefixName, baseIri); } String absolutizeIri(String iri) throws PrefixDeclarationException { - return prefixDeclarations.absolutize(iri); + return prefixDeclarationRegistry.absolutizeIri(iri); } String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { - return prefixDeclarations.resolvePrefixedName(prefixedName); + return prefixDeclarationRegistry.resolvePrefixedName(prefixedName); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index 1f1269898..2332afa83 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -24,7 +24,7 @@ import java.io.InputStream; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.RuleParser; @@ -40,7 +40,7 @@ public class SubParserFactory { private final KnowledgeBase knowledgeBase; private final ParserConfiguration parserConfiguration; - private final PrefixDeclarations prefixDeclarations; + private final PrefixDeclarationRegistry prefixDeclarationRegistry; private final byte[] namedNullNamespace; /** @@ -50,7 +50,7 @@ public class SubParserFactory { */ SubParserFactory(final JavaCCParser parser) { this.knowledgeBase = parser.getKnowledgeBase(); - this.prefixDeclarations = parser.getPrefixDeclarations(); + this.prefixDeclarationRegistry = parser.getPrefixDeclarationRegistry(); this.parserConfiguration = parser.getParserConfiguration(); this.namedNullNamespace = parser.getNamedNullNamespace(); } @@ -67,7 +67,7 @@ public class SubParserFactory { public JavaCCParser makeSubParser(final InputStream inputStream, final String encoding) { final JavaCCParser subParser = new JavaCCParser(inputStream, encoding); subParser.setKnowledgeBase(this.knowledgeBase); - subParser.setPrefixDeclarations(this.prefixDeclarations); + subParser.setPrefixDeclarationRegistry(this.prefixDeclarationRegistry); subParser.setParserConfiguration(this.parserConfiguration); subParser.setNamedNullNamespace(this.namedNullNamespace); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java index e2a56b1ce..b09d918f9 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java @@ -27,7 +27,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; @@ -37,8 +37,8 @@ public class RuleParserParseFactTest implements ParserTestUtils { - 
private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarations.XSD_STRING); - private final Constant b = Expressions.makeDatatypeConstant("b", PrefixDeclarations.XSD_STRING); + private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarationRegistry.XSD_STRING); + private final Constant b = Expressions.makeDatatypeConstant("b", PrefixDeclarationRegistry.XSD_STRING); private final Fact factA = Expressions.makeFact("p", a); private final Fact factAB = Expressions.makeFact("p", a, b); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index 25243d0cb..f6b143182 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -34,7 +34,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Term; @@ -51,7 +51,7 @@ public class RuleParserTest implements ParserTestUtils { private final Variable z = Expressions.makeUniversalVariable("Z"); private final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); private final Constant d = Expressions.makeAbstractConstant("http://example.org/d"); - private final Constant abc = Expressions.makeDatatypeConstant("abc", PrefixDeclarations.XSD_STRING); + private final Constant abc = Expressions.makeDatatypeConstant("abc", PrefixDeclarationRegistry.XSD_STRING); private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", x, c); private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); @@ -188,24 +188,24 @@ public void testNoDollarVariables() throws ParsingException { public void testIntegerLiteral() throws ParsingException { String input = "p(42)"; PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("42", PrefixDeclarations.XSD_INTEGER)); + Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @Test public void testAbbreviatedIntegerLiteral() throws ParsingException { - String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . " + "p(\"42\"^^xsd:integer) ."; + String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . 
" + "p(\"42\"^^xsd:integer) ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("42", PrefixDeclarations.XSD_INTEGER)); + Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(Arrays.asList(integerLiteral), statements); } @Test public void testFullIntegerLiteral() throws ParsingException { - String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> )"; + String input = "p(\"42\"^^<" + PrefixDeclarationRegistry.XSD_INTEGER + "> )"; PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("42", PrefixDeclarations.XSD_INTEGER)); + Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @@ -213,7 +213,7 @@ public void testFullIntegerLiteral() throws ParsingException { public void testDecimalLiteral() throws ParsingException { String input = "p(-5.0)"; PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("-5.0", PrefixDeclarations.XSD_DECIMAL)); + Expressions.makeDatatypeConstant("-5.0", PrefixDeclarationRegistry.XSD_DECIMAL)); assertEquals(decimalLiteral, RuleParser.parseLiteral(input)); } @@ -221,7 +221,7 @@ public void testDecimalLiteral() throws ParsingException { public void testDoubleLiteral() throws ParsingException { String input = "p(4.2E9)"; PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("4.2E9", PrefixDeclarations.XSD_DOUBLE)); + Expressions.makeDatatypeConstant("4.2E9", PrefixDeclarationRegistry.XSD_DOUBLE)); assertEquals(doubleLiteral, RuleParser.parseLiteral(input)); } @@ -241,14 +241,14 @@ public void testIncompleteStringLiteral() throws ParsingException { public void parseLiteral_escapeSequences_succeeds() throws ParsingException { String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); } @@ -257,14 +257,14 @@ public void parseLiteral_allEscapeSequences_succeeds() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_allEscapeSequences_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarations.XSD_STRING)); + 
Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); } @@ -284,14 +284,14 @@ public void parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingE public void parseLiteral_multiLineLiteral_succeeds() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_multiLineLiteral_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); } @@ -322,7 +322,7 @@ public void testUnicodeUri() throws ParsingException { @Test public void testPrefixedLiteral() throws ParsingException { - String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . " + "p(\"abc\"^^xsd:string) ."; + String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . " + "p(\"abc\"^^xsd:string) ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact2), statements); } diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java index dfcbc9090..361da7991 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java +++ b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java @@ -26,7 +26,7 @@ import org.openrdf.model.Value; import org.openrdf.model.datatypes.XMLDatatypeUtil; import org.openrdf.rio.ntriples.NTriplesUtil; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; @@ -68,7 +68,7 @@ static Term rdfLiteralToConstant(final Literal literal) { } else if (literal.getLanguage() != null) { return new LanguageStringConstantImpl(literal.getLabel(), literal.getLanguage()); } else { - return new DatatypeConstantImpl(literal.getLabel(), PrefixDeclarations.XSD_STRING); + return new DatatypeConstantImpl(literal.getLabel(), PrefixDeclarationRegistry.XSD_STRING); } } diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java index 1b61bc02c..b5ab85281 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java @@ -41,7 +41,7 @@ import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; import org.semanticweb.vlog4j.core.model.api.NamedNull; -import 
org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Term; @@ -178,7 +178,7 @@ public void testCollectionsPreserved() throws RDFHandlerException, RDFParseExcep Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file2, fileA, blank1)), Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(blank1, RDF_FIRST, - Expressions.makeDatatypeConstant("1", PrefixDeclarations.XSD_INTEGER))), + Expressions.makeDatatypeConstant("1", PrefixDeclarationRegistry.XSD_INTEGER))), Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(blank1, RDF_REST, RDF_NIL)), Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file3, fileA, blank2)), Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, From 3c32a6b1d7edbe20952864e82e383200aed39f93 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 28 Feb 2020 20:03:43 +0100 Subject: [PATCH 0562/1003] Address review comments --- .../PrefixDeclarationException.java | 2 +- .../core/model/api/ExistentialVariable.java | 7 -- .../model/api/PrefixDeclarationRegistry.java | 2 +- .../vlog4j/core/model/api/Term.java | 2 - .../core/model/api/UniversalVariable.java | 7 -- .../vlog4j/core/model/api/Variable.java | 10 +- .../MergingPrefixDeclarationRegistry.java | 8 +- .../implementation/RenamedNamedNull.java | 11 ++- .../core/model/implementation/Serializer.java | 66 +++++++++---- .../vlog4j/core/reasoner/KnowledgeBase.java | 97 +++++++++---------- .../vlog4j/core/reasoner/Reasoner.java | 2 +- .../MergingPrefixDeclarationRegistryTest.java | 12 +++ .../core/reasoner/KnowledgeBaseTest.java | 6 +- .../LocalPrefixDeclarationRegistry.java | 2 +- .../semanticweb/vlog4j/parser/RuleParser.java | 4 +- .../ImportFileDirectiveHandler.java | 5 +- .../ImportFileRelativeDirectiveHandler.java | 5 +- 17 files changed, 136 insertions(+), 112 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java index 78cb523c1..6424a028b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java @@ -21,7 +21,7 @@ */ public class PrefixDeclarationException extends VLog4jException { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 787997047134745982L; public PrefixDeclarationException(String errorMessage) { super(errorMessage); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java index c62b7ce1b..6952de6c3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java @@ -20,8 +20,6 @@ * #L% */ -import java.util.function.Function; - import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** @@ -41,9 +39,4 @@ default TermType getType() { default String getSyntacticRepresentation() { return Serializer.getString(this); } - - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return 
getSyntacticRepresentation(); - } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java index ae4c1f6c4..874889211 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java @@ -89,7 +89,7 @@ public interface PrefixDeclarationRegistry extends Iterable iriTransformer) { - return getSyntacticRepresentation(); - } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java index 63b642a93..308843bdf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,6 +20,8 @@ * #L% */ +import java.util.function.Function; + /** * Interface for variables, i.e., terms of type * {@link TermType#UNIVERSAL_VARIABLE} and @@ -30,4 +32,8 @@ * @author Markus Krötzsch */ public interface Variable extends Term { + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return getSyntacticRepresentation(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java index ac832aae5..bf1d14f8d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -125,17 +125,13 @@ public String unresolveAbsoluteIri(String iri) { * * @param other the set of prefix declarations to merge. Conflicting prefixes * from {@code other} will be renamed. 
- * - * @return this */ - public MergingPrefixDeclarationRegistry mergePrefixDeclarations(final PrefixDeclarationRegistry other) { + public void mergePrefixDeclarations(final PrefixDeclarationRegistry other) { this.setBaseIri(other.getBaseIri()); for (Entry prefix : other) { - setPrefixIri(prefix.getKey(), prefix.getValue()); + this.setPrefixIri(prefix.getKey(), prefix.getValue()); } - - return this; } private String getFreshPrefix() { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java index af1861859..ef05b14d2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java @@ -31,11 +31,12 @@ * @author Maximilian Marx */ public class RenamedNamedNull extends NamedNullImpl { - private RenamedNamedNull(String name) { - super(name); - } - + /** + * Construct a new renamed named null, with the given UUID as a name. + * + * @param name the name of the named null. + */ public RenamedNamedNull(UUID name) { - this(name.toString()); + super(name.toString()); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index ea03e05c3..2d10dc4a5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -155,23 +155,25 @@ public static String getFactString(final Fact fact) { } /** - * Creates a String representation of a given {@link Constant}. + * Creates a String representation of a given {@link AbstractConstant}. * * @see Rule syntax . - * @param constant a {@link Constant} + * @param constant a {@link AbstractConstant} * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given {@link Constant}. + * @return String representation corresponding to a given + * {@link AbstractConstant}. */ public static String getString(final AbstractConstant constant, Function iriTransformer) { return getIRIString(constant.getName(), iriTransformer); } /** - * Creates a String representation of a given {@link Constant}. + * Creates a String representation of a given {@link AbstractConstant}. * * @see Rule syntax . - * @param constant a {@link Constant} - * @return String representation corresponding to a given {@link Constant}. + * @param constant a {@link AbstractConstant} + * @return String representation corresponding to a given + * {@link AbstractConstant}. */ public static String getString(final AbstractConstant constant) { return getIRIString(constant.getName()); @@ -191,8 +193,20 @@ public static String getConstantName(final LanguageStringConstant languageString } /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} without an IRI. + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. + * + * examples: + *
+	 * <ul>
+	 * <li>{@code "string"^^xsd:String} results in {@code "string"},</li>
+	 * <li>{@code "23.0"^^xsd:Decimal} results in {@code 23.0},</li>
+	 * <li>{@code "42"^^xsd:Integer} results in {@code 42},</li>
+	 * <li>{@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and</li>
+	 * <li>{@code "test"^^} results in {@code "test"^^}, modulo transformation of the datatype IRI.</li>
+	 * </ul>
          * * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} @@ -207,16 +221,26 @@ public static String getString(final DatatypeConstant datatypeConstant, Function || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { return datatypeConstant.getLexicalValue(); - } else { - return getConstantName(datatypeConstant, iriTransformer); } + + return getConstantName(datatypeConstant, iriTransformer); } /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} without an IRI. + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. * - * @see Rule syntax . + * examples: + *
            + *
+	 * <ul>
+	 * <li>{@code "string"^^xsd:String} results in {@code "string"},</li>
+	 * <li>{@code "23.0"^^xsd:Decimal} results in {@code 23.0},</li>
+	 * <li>{@code "42"^^xsd:Integer} results in {@code 42},</li>
+	 * <li>{@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and</li>
+	 * <li>{@code "test"^^} results in {@code "test"^^}.</li>
+	 * </ul>
          * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -376,9 +400,9 @@ private static String getIRIString(final String string, Function if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { return addAngleBrackets(string); - } else { - return string; } + + return string; } /** @@ -394,7 +418,7 @@ private static String getIRIString(final String string, Function *
 	 * <li>{@code \n}</li>
 	 * <li>{@code \r}</li>
 	 * <li>{@code \f}</li>
 	 * </ul>
-	 * 
+	 *
          * Example for {@code string = "\\a"}, the returned value is * {@code string = "\"\\\\a\""} * @@ -416,7 +440,7 @@ public static String getString(final String string) { *
 	 * <li>{@code \n}</li>
 	 * <li>{@code \r}</li>
 	 * <li>{@code \f}</li>
 	 * </ul>
-	 * 
+	 *
          * * @param string * @return an escaped string @@ -466,12 +490,12 @@ public static String getString(Predicate predicate, List terms, Function { private final Set listeners = new HashSet<>(); /** - * all (canonical) file paths imported so far, used to prevent cyclic imports. + * All (canonical) file paths imported so far, used to prevent cyclic imports. */ private final Set importedFilePaths = new HashSet<>(); @@ -176,7 +176,7 @@ public Void visit(final DataSourceDeclaration statement) { * base. We try to preserve user-provided prefixes found in files when loading * data. */ - private MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + private MergingPrefixDeclarationRegistry prefixDeclarationRegistry = new MergingPrefixDeclarationRegistry(); /** * Index structure that organises all facts by their predicate. @@ -205,7 +205,6 @@ public void addListener(final KnowledgeBaseListener listener) { */ public void deleteListener(final KnowledgeBaseListener listener) { this.listeners.remove(listener); - } /** @@ -457,7 +456,7 @@ Map> getFactsByPredicate() { */ @FunctionalInterface public interface AdditionalInputParser { - KnowledgeBase parseInto(InputStream stream, KnowledgeBase kb) throws IOException, VLog4jException; + void parseInto(InputStream stream, KnowledgeBase kb) throws IOException, VLog4jException; } /** @@ -471,10 +470,8 @@ public interface AdditionalInputParser { * @throws IllegalArgumentException when {@code file} is null or has already * been imported * @throws VLog4jException when parseFunction throws VLog4jException - * - * @return this */ - public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunction) + public void importRulesFile(File file, AdditionalInputParser parseFunction) throws VLog4jException, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); @@ -482,41 +479,40 @@ public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunct Validate.isTrue(isNewFile, "file \"" + file.getName() + "\" was already imported."); try (InputStream stream = new FileInputStream(file)) { - return parseFunction.parseInto(stream, this); + parseFunction.parseInto(stream, this); } } /** * Merge {@link PrefixDeclarationRegistry} into this knowledge base. * - * @param prefixDeclarations the prefix declarations to merge. Conflicting - * prefix names in {@code prefixDeclarations} will be - * renamed. - * - * @return this + * @param prefixDeclarationRegistry the prefix declarations to merge. + * Conflicting prefix names in + * {@code prefixDeclarationRegistry} will be + * renamed to some implementation-specific, + * fresh prefix name. */ - public KnowledgeBase mergePrefixDeclarations(PrefixDeclarationRegistry prefixDeclarations) { - this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); - - return this; + public void mergePrefixDeclarations(PrefixDeclarationRegistry prefixDeclarationRegistry) { + this.prefixDeclarationRegistry.mergePrefixDeclarations(prefixDeclarationRegistry); } /** * Return the base IRI. * - * @return the base IRI, if declared, or {@code ""} otherwise. + * @return the base IRI, if declared, or + * {@link PrefixDeclarationRegistry#EMPTY_BASE} otherwise. */ - public String getBase() { - return this.prefixDeclarations.getBaseIri(); + public String getBaseIri() { + return this.prefixDeclarationRegistry.getBaseIri(); } - /* + /** * Return the declared prefixes. * * @return an iterator over all known prefixes. 
*/ public Iterator> getPrefixes() { - return this.prefixDeclarations.iterator(); + return this.prefixDeclarationRegistry.iterator(); } /** @@ -529,12 +525,13 @@ public Iterator> getPrefixes() { * * @return the declared IRI for {@code prefixName}. */ - public String getPrefix(String prefixName) throws PrefixDeclarationException { - return this.prefixDeclarations.getPrefixIri(prefixName); + public String getPrefixIri(String prefixName) throws PrefixDeclarationException { + return this.prefixDeclarationRegistry.getPrefixIri(prefixName); } - /* - * Resolve a prefixed name into an absolute IRI. Dual to unresolveAbsoluteIri. + /** + * Resolve a prefixed name into an absolute IRI. Dual to + * {@link unresolveAbsoluteIri}. * * @param prefixedName the prefixed name to resolve. * @@ -543,12 +540,12 @@ public String getPrefix(String prefixName) throws PrefixDeclarationException { * @return an absolute IRI corresponding to the prefixed name. */ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { - return this.prefixDeclarations.resolvePrefixedName(prefixedName); + return this.prefixDeclarationRegistry.resolvePrefixedName(prefixedName); } /** * Potentially abbreviate an absolute IRI using the declared prefixes. Dual to - * resolvePrefixedName. + * {@link resolvePrefixedName}. * * @param iri the absolute IRI to abbreviate. * @@ -556,6 +553,6 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE * declared prefixes, or {@code iri} if no suitable prefix is declared. */ public String unresolveAbsoluteIri(String iri) { - return this.prefixDeclarations.unresolveAbsoluteIri(iri); + return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 968c53b51..444d4c615 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -93,7 +93,7 @@ static Reasoner getInstance() { * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. * - * @param an OutputStream for the facts to be written to. + * @param stream an OutputStream for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. 
* @throws IOException diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java index de313a6a1..5aad3d706 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java @@ -25,6 +25,7 @@ import org.junit.Before; import org.junit.Test; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; public class MergingPrefixDeclarationRegistryTest { @@ -118,6 +119,17 @@ public void mergingPrefixDeclarationRegistry_constructor_succeeds() throws Prefi assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefixIri("eg:")); } + @Test + public void mergePrefixDeclarations_conflictingPrefixName_renamesConflictingPrefixName() + throws PrefixDeclarationException { + this.prefixDeclarations.setPrefixIri("eg:", BASE); + PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + assertEquals(BASE, this.prefixDeclarations.getPrefixIri("eg:")); + assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("vlog4j_generated_0:")); + } + @Test public void unresolveAbsoluteIri_default_identical() { assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java index ce862f09b..8e0531a67 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java @@ -94,12 +94,12 @@ public void testDoRemoveStatementInexistentPredicate() { @Test public void getBase_default_hasEmptyBase() { - assertEquals("", this.kb.getBase()); + assertEquals("", this.kb.getBaseIri()); } @Test(expected = PrefixDeclarationException.class) public void getPrefix_defaultUndeclaredPrefix_throws() throws PrefixDeclarationException { - this.kb.getPrefix("ex:"); + this.kb.getPrefixIri("ex:"); } @Test(expected = PrefixDeclarationException.class) @@ -113,7 +113,7 @@ public void mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationExc MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setPrefixIri("ex:", iri); this.kb.mergePrefixDeclarations(prefixDeclarations); - assertEquals(this.kb.getPrefix("ex:"), iri); + assertEquals(this.kb.getPrefixIri("ex:"), iri); assertEquals(this.kb.resolvePrefixedName("ex:test"), iri + "test"); assertEquals(this.kb.unresolveAbsoluteIri(iri + "test"), "ex:test"); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java index 42e014cae..e5e3ddba3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java 
@@ -80,7 +80,7 @@ public void setPrefixIri(String prefixName, String prefixIri) throws PrefixDecla * Sets the base namespace to the given value. This should only be done once, * and not after the base namespace was assumed to be an implicit default value. * - * @param baseIri the new base namespace + * @param baseUri the new base namespace * @throws PrefixDeclarationException if base was already defined */ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 2fab78fd9..85152fc8a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -261,7 +261,9 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException } KnowledgeBase knowledgeBase = parser.getKnowledgeBase(); - return knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarationRegistry()); + knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarationRegistry()); + + return knowledgeBase; } protected static DataSourceDeclaration parseAndExtractDatasourceDeclaration(final JavaCCParser parser) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java index af494d37c..f578a7c7b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java @@ -48,12 +48,13 @@ public KnowledgeBase handleDirective(List arguments, final Su ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); try { - return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { + knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { RuleParser.parseInto(kb, stream, parserConfiguration); - return kb; }); } catch (Exception e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } + + return knowledgeBase; } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java index 2ac3216d1..ca6610a18 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -49,12 +49,13 @@ public KnowledgeBase handleDirective(List arguments, SubParse ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); try { - return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { + knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarationRegistry.getBaseIri()); - return kb; }); } catch (Exception e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } + + return knowledgeBase; } } From 6b2237713f60405efdaa3bf0ae55189a391f8eeb Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 28 Feb 2020 20:56:00 +0100 Subject: [PATCH 0563/1003] Core: Drop unused 
imports --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index ebbf50ad1..cdec467b5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; @@ -10,7 +8,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.Function; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; From 940c3a4392c77117d62cbc1e306cd7a00b53d611 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 13:09:50 +0100 Subject: [PATCH 0564/1003] Update release notes --- RELEASE-NOTES.md | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 5a1e07412..6fb2310fa 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -6,11 +6,25 @@ VLog4j v0.6.0 Breaking changes: * In the example package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no - longer exist. It can be replaced by + longer exist. It can be replaced by `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` +* All inferred facts can be serialized to a file using `Reasoner.writeInferences()` +* Rules files may import other rules files using `@import` and + `@import-relative`, where the latter resolves relative IRIs using + the current base IRI, unless the imported file explicitly specifies + a different one. +* Named nulls of the form `_:name` are now allowed during parsing (but + may not occur in rule bodies). They are renamed to assure that they + are distinct on a per-file level. +* The parser allows custom directives to be implemented, and a certain + set of delimiters allows for custom literal expressions. + +Other improvements: +* Prefix declarations are now kept as part of the Knowledge Base and + are used to abbreviate names when exporting inferences. 
VLog4j v0.5.0 From 70d53373255d8da8fc24e61a4ea8f4372423343a Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 14:04:26 +0100 Subject: [PATCH 0565/1003] Core: Move skolemization into core --- .../implementation/Skolemization.java | 61 +++++++++++++++ .../implementation/SkolemizationTest.java | 76 +++++++++++++++++++ .../parser/javacc/JavaCCParserBase.java | 23 ++---- .../parser/javacc/SubParserFactory.java | 7 +- 4 files changed, 149 insertions(+), 18 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java new file mode 100644 index 000000000..2a6269ebb --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java @@ -0,0 +1,61 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.UUID; + +import org.semanticweb.vlog4j.core.model.api.NamedNull; +import org.semanticweb.vlog4j.core.model.implementation.RenamedNamedNull; + +/** + * A class that implements skolemization of named null names. The same + * name should be skolemized to the same {@link NamedNull} when + * skolemized using the same instance, but to different instances of + * {@link NamedNull} when skolemized using different instances of + * {@link Skolemization}. + * + * @author Maximilian Marx + */ +public class Skolemization { + /** + * The namespace to use for skolemizing named null names. + */ + private final byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); + + /** + * Skolemize a named null name. The same {@code name} will map to + * a {@link RenamedNamedNull} instance with the same name when + * called on the same instance. + * + * @throws IOException when ByteArrayOutputStream throws. + * @return a {@link RenamedNamedNull} instance with a new name + * that is specific to this instance and {@code name}. 
+ */ + public RenamedNamedNull skolemizeNamedNull(String name) throws IOException { + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + stream.write(namedNullNamespace); + stream.write(name.getBytes()); + + return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); + } +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java new file mode 100644 index 000000000..bebe220d2 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java @@ -0,0 +1,76 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; + +import org.junit.Before; +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.NamedNull; + +public class SkolemizationTest { + private Skolemization skolemization; + private final static String name1 = "_:1"; + private final static String name2 = "_:2"; + + @Before + public void init() { + this.skolemization = new Skolemization(); + } + + @Test + public void skolemizeNamedNull_sameName_mapsToSameNamedNull() throws IOException { + NamedNull null1 = skolemization.skolemizeNamedNull(name1); + NamedNull null2 = skolemization.skolemizeNamedNull(name1); + + assertEquals(null1.getName(), null2.getName()); + } + + @Test + public void skolemizeNamedNull_differentName_mapsToDifferentNamedNull() throws IOException { + NamedNull null1 = skolemization.skolemizeNamedNull(name1); + NamedNull null2 = skolemization.skolemizeNamedNull(name2); + + assertNotEquals(null1.getName(), null2.getName()); + } + + @Test + public void skolemizeNamedNull_differentInstances_mapsToDifferentNamedNull() throws IOException { + NamedNull null1 = skolemization.skolemizeNamedNull(name1); + Skolemization other = new Skolemization(); + NamedNull null2 = other.skolemizeNamedNull(name1); + + assertNotEquals(null1.getName(), null2.getName()); + } + + @Test + public void skolemizeNamedNull_differentInstancesDifferentNames_mapsToDifferentNamedNull() throws IOException { + NamedNull null1 = skolemization.skolemizeNamedNull(name1); + Skolemization other = new Skolemization(); + NamedNull null2 = other.skolemizeNamedNull(name2); + + assertNotEquals(null1.getName(), null2.getName()); + assertEquals(null1.getName(), skolemization.skolemizeNamedNull(name1).getName()); + assertEquals(null2.getName(), other.skolemizeNamedNull(name2).getName()); + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index ac977898e..c284342d3 100644 --- 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -1,8 +1,5 @@ package org.semanticweb.vlog4j.parser.javacc; -import java.io.ByteArrayOutputStream; -import java.io.IOException; - /*- * #%L * vlog4j-parser @@ -23,9 +20,9 @@ * #L% */ +import java.io.IOException; import java.util.HashSet; import java.util.List; -import java.util.UUID; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; @@ -38,8 +35,8 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.RenamedNamedNull; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.Skolemization; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarationRegistry; @@ -66,7 +63,7 @@ public class JavaCCParserBase { private KnowledgeBase knowledgeBase; private ParserConfiguration parserConfiguration; - private byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); + private Skolemization skolemization = new Skolemization(); /** * "Local" variable to remember (universal) body variables during parsing. @@ -165,15 +162,11 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } NamedNull createNamedNull(String lexicalForm) throws ParseException { - ByteArrayOutputStream stream = new ByteArrayOutputStream(); try { - stream.write(namedNullNamespace); - stream.write(lexicalForm.getBytes()); + return this.skolemization.skolemizeNamedNull(lexicalForm); } catch (IOException e) { throw makeParseExceptionWithCause("Failed to generate a unique name for named null", e); } - - return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); } void addStatement(Statement statement) { @@ -326,12 +319,12 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } - byte[] getNamedNullNamespace() { - return namedNullNamespace; + Skolemization getSkolemization() { + return skolemization; } - void setNamedNullNamespace(byte[] namedNullNamespace) { - this.namedNullNamespace = namedNullNamespace; + void setSkolemization(Skolemization skolemization) { + this.skolemization = skolemization; } public void setPrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index 2332afa83..ba1f38155 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -26,6 +26,7 @@ import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.Skolemization; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.RuleParser; @@ -41,7 +42,7 @@ public class SubParserFactory { private final KnowledgeBase 
knowledgeBase; private final ParserConfiguration parserConfiguration; private final PrefixDeclarationRegistry prefixDeclarationRegistry; - private final byte[] namedNullNamespace; + private final Skolemization skolemization; /** * Construct a SubParserFactory. @@ -52,7 +53,7 @@ public class SubParserFactory { this.knowledgeBase = parser.getKnowledgeBase(); this.prefixDeclarationRegistry = parser.getPrefixDeclarationRegistry(); this.parserConfiguration = parser.getParserConfiguration(); - this.namedNullNamespace = parser.getNamedNullNamespace(); + this.skolemization = parser.getSkolemization(); } /** @@ -69,7 +70,7 @@ public JavaCCParser makeSubParser(final InputStream inputStream, final String en subParser.setKnowledgeBase(this.knowledgeBase); subParser.setPrefixDeclarationRegistry(this.prefixDeclarationRegistry); subParser.setParserConfiguration(this.parserConfiguration); - subParser.setNamedNullNamespace(this.namedNullNamespace); + subParser.setSkolemization(this.skolemization); return subParser; } From ede5576e814223f504400f5391b32e94f97eac5e Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 14:43:19 +0100 Subject: [PATCH 0566/1003] Rename all files, directories, and artifacts --- .gitignore | 10 +- LICENSE.txt | 402 +++++++++--------- README.md | 40 +- RELEASE-NOTES.md | 5 +- build-vlog-library.sh | 6 +- coverage/pom.xml | 28 +- pom.xml | 53 +-- .../LICENSE.txt | 0 {vlog4j-client => rulewerk-client}/pom.xml | 20 +- .../rulewerk}/client/picocli/ClientUtils.java | 10 +- .../client/picocli/PrintQueryResults.java | 2 +- .../rulewerk}/client/picocli/SaveModel.java | 2 +- .../client/picocli/SaveQueryResults.java | 2 +- .../client/picocli/VLog4jClient.java | 2 +- .../picocli/VLog4jClientMaterialize.java | 18 +- .../client/picocli/PrintQueryResultsTest.java | 6 +- .../client/picocli/SaveModelTest.java | 2 +- .../client/picocli/SaveQueryResultsTest.java | 2 +- {vlog4j-core => rulewerk-core}/LICENSE.txt | 0 {vlog4j-core => rulewerk-core}/pom.xml | 130 +++--- .../IncompatiblePredicateArityException.java | 6 +- .../PrefixDeclarationException.java | 2 +- .../exceptions/ReasonerStateException.java | 4 +- .../core/exceptions/VLog4jException.java | 2 +- .../exceptions/VLog4jRuntimeException.java | 2 +- .../core/model/api/AbstractConstant.java | 4 +- .../rulewerk}/core/model/api/Conjunction.java | 4 +- .../rulewerk}/core/model/api/Constant.java | 2 +- .../rulewerk}/core/model/api/DataSource.java | 2 +- .../core/model/api/DataSourceDeclaration.java | 4 +- .../core/model/api/DatatypeConstant.java | 4 +- .../rulewerk}/core/model/api/Entity.java | 2 +- .../core/model/api/ExistentialVariable.java | 4 +- .../rulewerk}/core/model/api/Fact.java | 4 +- .../model/api/LanguageStringConstant.java | 4 +- .../rulewerk}/core/model/api/Literal.java | 4 +- .../rulewerk}/core/model/api/NamedNull.java | 4 +- .../core/model/api/NegativeLiteral.java | 2 +- .../core/model/api/PositiveLiteral.java | 2 +- .../rulewerk}/core/model/api/Predicate.java | 4 +- .../model/api/PrefixDeclarationRegistry.java | 4 +- .../rulewerk}/core/model/api/QueryResult.java | 2 +- .../rulewerk}/core/model/api/Rule.java | 4 +- .../rulewerk}/core/model/api/Statement.java | 2 +- .../core/model/api/StatementVisitor.java | 2 +- .../core/model/api/SyntaxObject.java | 2 +- .../rulewerk}/core/model/api/Term.java | 2 +- .../rulewerk}/core/model/api/TermType.java | 2 +- .../rulewerk}/core/model/api/TermVisitor.java | 2 +- .../rulewerk}/core/model/api/Terms.java | 2 +- .../core/model/api/UniversalVariable.java | 4 +- 
.../rulewerk}/core/model/api/Variable.java | 2 +- .../implementation/AbstractConstantImpl.java | 6 +- .../implementation/AbstractLiteralImpl.java | 8 +- .../AbstractPrefixDeclarationRegistry.java | 6 +- .../implementation/AbstractTermImpl.java | 4 +- .../model/implementation/ConjunctionImpl.java | 8 +- .../DataSourceDeclarationImpl.java | 10 +- .../implementation/DatatypeConstantImpl.java | 6 +- .../ExistentialVariableImpl.java | 6 +- .../model/implementation/Expressions.java | 28 +- .../core/model/implementation/FactImpl.java | 10 +- .../LanguageStringConstantImpl.java | 6 +- .../MergingPrefixDeclarationRegistry.java | 4 +- .../model/implementation/NamedNullImpl.java | 6 +- .../implementation/NegativeLiteralImpl.java | 8 +- .../implementation/PositiveLiteralImpl.java | 8 +- .../model/implementation/PredicateImpl.java | 4 +- .../implementation/RenamedNamedNull.java | 6 +- .../core/model/implementation/RuleImpl.java | 16 +- .../core/model/implementation/Serializer.java | 44 +- .../implementation/UniversalVariableImpl.java | 6 +- .../core/reasoner/AcyclicityNotion.java | 2 +- .../rulewerk}/core/reasoner/Algorithm.java | 2 +- .../rulewerk}/core/reasoner/Correctness.java | 2 +- .../core/reasoner/CyclicityResult.java | 2 +- .../core/reasoner/KnowledgeBase.java | 28 +- .../core/reasoner/KnowledgeBaseListener.java | 4 +- .../rulewerk}/core/reasoner/LogLevel.java | 2 +- .../core/reasoner/QueryAnswerCount.java | 2 +- .../core/reasoner/QueryResultIterator.java | 4 +- .../rulewerk}/core/reasoner/Reasoner.java | 22 +- .../core/reasoner/ReasonerState.java | 2 +- .../core/reasoner/RuleRewriteStrategy.java | 2 +- .../implementation/CsvFileDataSource.java | 4 +- .../EmptyQueryResultIterator.java | 8 +- .../implementation/FileDataSource.java | 2 +- .../implementation/InMemoryDataSource.java | 6 +- .../implementation/ModelToVLogConverter.java | 22 +- .../implementation/QueryAnswerCountImpl.java | 6 +- .../implementation/QueryResultImpl.java | 6 +- .../implementation/RdfFileDataSource.java | 4 +- .../implementation/Skolemization.java | 6 +- .../SparqlQueryResultDataSource.java | 6 +- .../implementation/TermToVLogConverter.java | 20 +- .../implementation/VLogDataSource.java | 4 +- .../implementation/VLogKnowledgeBase.java | 34 +- .../VLogQueryResultIterator.java | 8 +- .../reasoner/implementation/VLogReasoner.java | 50 +-- .../implementation/VLogToModelConverter.java | 20 +- .../src/test/data/input/binaryFacts.csv | 0 .../src/test/data/input/constantD.csv | 0 .../src/test/data/input/empty.csv | 0 .../test/data/input/invalidFormatNtFacts.nt | 0 .../src/test/data/input/ternaryFacts.nt | 0 .../test/data/input/ternaryFactsZipped.nt.gz | Bin .../src/test/data/input/unaryFacts.csv | 0 .../src/test/data/input/unaryFactsCD.csv | 0 .../test/data/input/unaryFactsZipped.csv.gz | Bin .../src/test/data/output/.keep | 0 .../core/model/ConjunctionImplTest.java | 18 +- .../core/model/DataSourceDeclarationTest.java | 20 +- .../rulewerk}/core/model/FactTest.java | 14 +- .../MergingPrefixDeclarationRegistryTest.java | 8 +- .../core/model/NegativeLiteralImplTest.java | 20 +- .../core/model/PositiveLiteralImplTest.java | 20 +- .../core/model/PredicateImplTest.java | 8 +- .../rulewerk}/core/model/RuleImplTest.java | 24 +- .../rulewerk}/core/model/TermImplTest.java | 24 +- .../core/reasoner/KnowledgeBaseTest.java | 10 +- .../rulewerk}/core/reasoner/LoggingTest.java | 16 +- .../core/reasoner/ReasonerTimeoutTest.java | 16 +- .../implementation/AddDataSourceTest.java | 22 +- .../implementation/AnswerQueryTest.java | 28 +- 
.../implementation/CsvFileDataSourceTest.java | 4 +- .../FileDataSourceTestUtils.java | 16 +- .../GeneratedAnonymousIndividualsTest.java | 22 +- .../ModelToVLogConverterTest.java | 24 +- .../implementation/QueryAnswerCountTest.java | 22 +- .../QueryAnsweringCorrectnessTest.java | 30 +- .../implementation/QueryResultImplTest.java | 12 +- .../implementation/QueryResultsUtils.java | 8 +- .../implementation/RdfFileDataSourceTest.java | 4 +- .../implementation/SkolemizationTest.java | 4 +- .../SparqlQueryResultDataSourceTest.java | 6 +- .../implementation/VLogReasonerBasics.java | 22 +- .../VLogReasonerCombinedInputs.java | 24 +- .../implementation/VLogReasonerCsvInput.java | 24 +- .../implementation/VLogReasonerCsvOutput.java | 14 +- .../implementation/VLogReasonerNegation.java | 22 +- .../implementation/VLogReasonerRdfInput.java | 16 +- .../VLogReasonerSparqlInput.java | 18 +- .../implementation/VLogReasonerStateTest.java | 34 +- .../VLogReasonerWriteInferencesTest.java | 30 +- .../VLogToModelConverterTest.java | 14 +- .../vlog/ExportQueryResultToCsvFileTest.java | 4 +- .../core/reasoner/vlog/LargeAritiesTest.java | 2 +- .../reasoner/vlog/StratifiedNegationTest.java | 4 +- .../vlog/VLogDataFromCsvFileTest.java | 4 +- .../reasoner/vlog/VLogDataFromMemoryTest.java | 2 +- .../vlog/VLogDataFromRdfFileTest.java | 4 +- .../core/reasoner/vlog/VLogExpressions.java | 2 +- .../reasoner/vlog/VLogQueryResultUtils.java | 2 +- .../core/reasoner/vlog/VLogQueryTest.java | 2 +- .../core/reasoner/vlog/VLogTermNamesTest.java | 2 +- .../LICENSE.txt | 0 .../README.md | 0 .../pom.xml | 179 ++++---- .../src/main/data/.gitignore | 0 .../src/main/data/input/bicycleEDB.csv.gz | Bin .../main/data/input/counting-triangles.rls | 0 .../src/main/data/input/doid.nt.gz | Bin .../src/main/data/input/doid.rls | 0 .../main/data/input/graal/doid-example.dlgp | 0 .../src/main/data/input/graal/example.dlgp | 0 .../src/main/data/input/hasPartEDB.csv.gz | Bin .../src/main/data/input/owl/bike.owl | 0 .../rdf/iswc-2016-complete-alignments.rdf | 0 .../main/data/input/ternaryBicycleEDB.nt.gz | Bin .../src/main/data/input/wheelEDB.csv.gz | Bin .../src/main/data/output/.keep | 0 .../examples/CompareWikidataDBpedia.java | 12 +- .../rulewerk}/examples/CountingTriangles.java | 10 +- .../rulewerk}/examples/DoidExample.java | 14 +- .../rulewerk}/examples/ExamplesUtils.java | 22 +- .../InMemoryGraphAnalysisExample.java | 18 +- .../examples/SimpleReasoningExample.java | 12 +- .../examples/core/AddDataFromCsvFile.java | 18 +- .../examples/core/AddDataFromRdfFile.java | 20 +- .../core/AddDataFromSparqlQueryResults.java | 30 +- .../core/ConfigureReasonerLogging.java | 12 +- .../SkolemVsRestrictedChaseTermination.java | 16 +- .../examples/graal/AddDataFromDlgpFile.java | 12 +- .../examples/graal/AddDataFromGraal.java | 16 +- .../examples/graal/DoidExampleGraal.java | 32 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 26 +- .../examples/rdf/AddDataFromRdfModel.java | 28 +- .../src/main/logs/.keep | 0 {vlog4j-graal => rulewerk-graal}/LICENSE.txt | 0 {vlog4j-graal => rulewerk-graal}/pom.xml | 10 +- .../graal/GraalConjunctiveQueryToRule.java | 14 +- .../graal/GraalConvertException.java | 2 +- .../graal/GraalToVLog4JModelConverter.java | 16 +- .../GraalToVLog4JModelConverterTest.java | 16 +- .../LICENSE.txt | 0 {vlog4j-owlapi => rulewerk-owlapi}/pom.xml | 72 ++-- .../owlapi/AbstractClassToRuleConverter.java | 12 +- .../owlapi/ClassToRuleBodyConverter.java | 10 +- .../owlapi/ClassToRuleHeadConverter.java | 8 +- .../owlapi/OwlAxiomToRulesConverter.java | 
32 +- .../OwlFeatureNotSupportedException.java | 2 +- .../owlapi/OwlToRulesConversionHelper.java | 22 +- .../rulewerk}/owlapi/OwlToRulesConverter.java | 6 +- .../owlapi/OwlAxiomToRulesConverterTest.java | 16 +- .../LICENSE.txt | 0 {vlog4j-parser => rulewerk-parser}/pom.xml | 12 +- .../parser/ConfigurableLiteralHandler.java | 6 +- .../parser/DataSourceDeclarationHandler.java | 4 +- .../parser/DatatypeConstantHandler.java | 4 +- .../parser/DefaultParserConfiguration.java | 14 +- .../rulewerk}/parser/DirectiveArgument.java | 4 +- .../rulewerk}/parser/DirectiveHandler.java | 12 +- .../LocalPrefixDeclarationRegistry.java | 8 +- .../rulewerk}/parser/ParserConfiguration.java | 30 +- .../rulewerk}/parser/ParsingException.java | 4 +- .../rulewerk}/parser/RuleParser.java | 30 +- .../CsvFileDataSourceDeclarationHandler.java | 16 +- .../RdfFileDataSourceDeclarationHandler.java | 16 +- ...eryResultDataSourceDeclarationHandler.java | 16 +- .../ImportFileDirectiveHandler.java | 16 +- .../ImportFileRelativeDirectiveHandler.java | 18 +- .../rulewerk}/parser/javacc/.gitignore | 0 .../rulewerk}/parser/javacc/JavaCCParser.jj | 44 +- .../parser/javacc/JavaCCParserBase.java | 38 +- .../parser/javacc/SubParserFactory.java | 12 +- .../parser/DirectiveArgumentTest.java | 6 +- .../parser/DirectiveHandlerTest.java | 6 +- .../rulewerk}/parser/EntityTest.java | 30 +- .../parser/ParserConfigurationTest.java | 14 +- .../rulewerk}/parser/ParserTestUtils.java | 10 +- .../RuleParserConfigurableLiteralTest.java | 20 +- .../parser/RuleParserDataSourceTest.java | 32 +- .../parser/RuleParserParseFactTest.java | 24 +- .../rulewerk}/parser/RuleParserTest.java | 32 +- .../parser/javacc/JavaCCParserBaseTest.java | 12 +- .../src/test/resources/base.rls | 0 .../src/test/resources/blank.rls | 0 .../src/test/resources/facts.rls | 0 {vlog4j-rdf => rulewerk-rdf}/LICENSE.txt | 0 {vlog4j-rdf => rulewerk-rdf}/pom.xml | 10 +- .../rulewerk}/rdf/RdfModelConverter.java | 16 +- .../rdf/RdfValueToTermConverter.java | 14 +- .../src/test/data/input/collections.ttl | 0 .../src/test/data/input/escapedCharacters.ttl | 0 .../src/test/data/input/exampleFacts.ttl | 0 .../src/test/data/input/labelledBNodes.ttl | 0 .../src/test/data/input/languageTags.ttl | 0 .../src/test/data/input/literalValues.ttl | 0 .../src/test/data/input/relativeURIs.ttl | 0 .../src/test/data/input/unlabelledBNodes.ttl | 0 .../data/input/unnormalizedLiteralValues.ttl | 0 .../src/test/data/output/.keep | 0 .../rulewerk}/rdf/RdfTestUtils.java | 12 +- .../rdf/TestConvertRdfFileToFacts.java | 22 +- .../rulewerk}/rdf/TestReasonOverRdfFacts.java | 22 +- 255 files changed, 1634 insertions(+), 1629 deletions(-) rename {vlog4j-client => rulewerk-client}/LICENSE.txt (100%) rename {vlog4j-client => rulewerk-client}/pom.xml (82%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/ClientUtils.java (92%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/PrintQueryResults.java (98%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/SaveModel.java (98%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/SaveQueryResults.java (98%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => 
rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/VLog4jClient.java (96%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/VLog4jClientMaterialize.java (93%) rename {vlog4j-client/src/test/java/org/semanticweb/vlog4j => rulewerk-client/src/test/java/org/semanticweb/rulewerk}/client/picocli/PrintQueryResultsTest.java (96%) rename {vlog4j-client/src/test/java/org/semanticweb/vlog4j => rulewerk-client/src/test/java/org/semanticweb/rulewerk}/client/picocli/SaveModelTest.java (99%) rename {vlog4j-client/src/test/java/org/semanticweb/vlog4j => rulewerk-client/src/test/java/org/semanticweb/rulewerk}/client/picocli/SaveQueryResultsTest.java (99%) rename {vlog4j-core => rulewerk-core}/LICENSE.txt (100%) rename {vlog4j-core => rulewerk-core}/pom.xml (79%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/exceptions/IncompatiblePredicateArityException.java (89%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/exceptions/PrefixDeclarationException.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/exceptions/ReasonerStateException.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/exceptions/VLog4jException.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/exceptions/VLog4jRuntimeException.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/AbstractConstant.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Conjunction.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Constant.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/DataSource.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/DataSourceDeclaration.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/DatatypeConstant.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Entity.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/ExistentialVariable.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Fact.java (89%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/LanguageStringConstant.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Literal.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/NamedNull.java (91%) rename 
{vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/NegativeLiteral.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/PositiveLiteral.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Predicate.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/PrefixDeclarationRegistry.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/QueryResult.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Rule.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Statement.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/StatementVisitor.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/SyntaxObject.java (98%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Term.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/TermType.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/TermVisitor.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Terms.java (98%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/UniversalVariable.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Variable.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/AbstractConstantImpl.java (87%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/AbstractLiteralImpl.java (93%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/AbstractPrefixDeclarationRegistry.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/AbstractTermImpl.java (93%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/ConjunctionImpl.java (89%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/DataSourceDeclarationImpl.java (87%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/DatatypeConstantImpl.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => 
rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/ExistentialVariableImpl.java (86%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/Expressions.java (93%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/FactImpl.java (81%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/LanguageStringConstantImpl.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/MergingPrefixDeclarationRegistry.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/NamedNullImpl.java (88%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/NegativeLiteralImpl.java (80%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/PositiveLiteralImpl.java (80%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/PredicateImpl.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/RenamedNamedNull.java (84%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/RuleImpl.java (88%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/Serializer.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/UniversalVariableImpl.java (86%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/AcyclicityNotion.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/Algorithm.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/Correctness.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/CyclicityResult.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/KnowledgeBase.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/KnowledgeBaseListener.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/LogLevel.java (93%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/QueryAnswerCount.java (98%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/QueryResultIterator.java (92%) rename 
{vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/Reasoner.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/ReasonerState.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/RuleRewriteStrategy.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/CsvFileDataSource.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/EmptyQueryResultIterator.java (82%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/FileDataSource.java (98%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/InMemoryDataSource.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/ModelToVLogConverter.java (87%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryAnswerCountImpl.java (88%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryResultImpl.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/RdfFileDataSource.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/Skolemization.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/SparqlQueryResultDataSource.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/TermToVLogConverter.java (84%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogDataSource.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogKnowledgeBase.java (89%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogQueryResultIterator.java (87%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasoner.java (93%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogToModelConverter.java (86%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/binaryFacts.csv (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/constantD.csv (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/empty.csv (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/invalidFormatNtFacts.nt (100%) rename 
{vlog4j-core => rulewerk-core}/src/test/data/input/ternaryFacts.nt (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/ternaryFactsZipped.nt.gz (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/unaryFacts.csv (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/unaryFactsCD.csv (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/unaryFactsZipped.csv.gz (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/output/.keep (100%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/ConjunctionImplTest.java (94%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/DataSourceDeclarationTest.java (88%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/FactTest.java (81%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/MergingPrefixDeclarationRegistryTest.java (96%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/NegativeLiteralImplTest.java (89%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/PositiveLiteralImplTest.java (89%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/PredicateImplTest.java (89%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/RuleImplTest.java (90%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/TermImplTest.java (87%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/KnowledgeBaseTest.java (92%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/LoggingTest.java (92%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/ReasonerTimeoutTest.java (91%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/AddDataSourceTest.java (94%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/AnswerQueryTest.java (94%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/CsvFileDataSourceTest.java (95%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/FileDataSourceTestUtils.java (89%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java (89%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/ModelToVLogConverterTest.java (91%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => 
rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryAnswerCountTest.java (95%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java (96%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryResultImplTest.java (80%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryResultsUtils.java (88%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/RdfFileDataSourceTest.java (94%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/SkolemizationTest.java (95%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java (93%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerBasics.java (80%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerCombinedInputs.java (88%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerCsvInput.java (85%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerCsvOutput.java (93%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerNegation.java (83%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerRdfInput.java (90%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerSparqlInput.java (93%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerStateTest.java (93%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java (85%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogToModelConverterTest.java (87%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java (94%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/LargeAritiesTest.java (96%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/StratifiedNegationTest.java (97%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => 
rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogDataFromCsvFileTest.java (97%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogDataFromMemoryTest.java (97%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogDataFromRdfFileTest.java (96%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogExpressions.java (98%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogQueryResultUtils.java (97%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogQueryTest.java (98%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogTermNamesTest.java (98%) rename {vlog4j-examples => rulewerk-examples}/LICENSE.txt (100%) rename {vlog4j-examples => rulewerk-examples}/README.md (100%) rename {vlog4j-examples => rulewerk-examples}/pom.xml (79%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/.gitignore (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/bicycleEDB.csv.gz (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/counting-triangles.rls (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/doid.nt.gz (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/doid.rls (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/graal/doid-example.dlgp (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/graal/example.dlgp (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/hasPartEDB.csv.gz (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/owl/bike.owl (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/ternaryBicycleEDB.nt.gz (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/wheelEDB.csv.gz (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/output/.keep (100%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/CompareWikidataDBpedia.java (92%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/CountingTriangles.java (89%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/DoidExample.java (86%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/ExamplesUtils.java (87%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/InMemoryGraphAnalysisExample.java (85%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/SimpleReasoningExample.java (90%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => 
rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/core/AddDataFromCsvFile.java (87%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/core/AddDataFromRdfFile.java (86%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/core/AddDataFromSparqlQueryResults.java (86%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/core/ConfigureReasonerLogging.java (92%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/core/SkolemVsRestrictedChaseTermination.java (88%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/graal/AddDataFromDlgpFile.java (93%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/graal/AddDataFromGraal.java (91%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/graal/DoidExampleGraal.java (84%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/owlapi/OwlOntologyToRulesAndFacts.java (84%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/rdf/AddDataFromRdfModel.java (87%) rename {vlog4j-examples => rulewerk-examples}/src/main/logs/.keep (100%) rename {vlog4j-graal => rulewerk-graal}/LICENSE.txt (100%) rename {vlog4j-graal => rulewerk-graal}/pom.xml (80%) rename {vlog4j-graal/src/main/java/org/semanticweb/vlog4j => rulewerk-graal/src/main/java/org/semanticweb/rulewerk}/graal/GraalConjunctiveQueryToRule.java (90%) rename {vlog4j-graal/src/main/java/org/semanticweb/vlog4j => rulewerk-graal/src/main/java/org/semanticweb/rulewerk}/graal/GraalConvertException.java (96%) rename {vlog4j-graal/src/main/java/org/semanticweb/vlog4j => rulewerk-graal/src/main/java/org/semanticweb/rulewerk}/graal/GraalToVLog4JModelConverter.java (96%) rename {vlog4j-graal/src/test/java/org/semanticweb/vlog4j => rulewerk-graal/src/test/java/org/semanticweb/rulewerk}/graal/GraalToVLog4JModelConverterTest.java (96%) rename {vlog4j-owlapi => rulewerk-owlapi}/LICENSE.txt (100%) rename {vlog4j-owlapi => rulewerk-owlapi}/pom.xml (79%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/AbstractClassToRuleConverter.java (96%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/ClassToRuleBodyConverter.java (95%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/ClassToRuleHeadConverter.java (96%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/OwlAxiomToRulesConverter.java (95%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/OwlFeatureNotSupportedException.java (96%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => 
rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/OwlToRulesConversionHelper.java (90%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/OwlToRulesConverter.java (92%) rename {vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk}/owlapi/OwlAxiomToRulesConverterTest.java (98%) rename {vlog4j-parser => rulewerk-parser}/LICENSE.txt (100%) rename {vlog4j-parser => rulewerk-parser}/pom.xml (93%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/ConfigurableLiteralHandler.java (89%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/DataSourceDeclarationHandler.java (89%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/DatatypeConstantHandler.java (91%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/DefaultParserConfiguration.java (74%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/DirectiveArgument.java (98%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/DirectiveHandler.java (95%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/LocalPrefixDeclarationRegistry.java (90%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/ParserConfiguration.java (90%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/ParsingException.java (91%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/RuleParser.java (91%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/datasources/CsvFileDataSourceDeclarationHandler.java (74%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/datasources/RdfFileDataSourceDeclarationHandler.java (74%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java (74%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/directives/ImportFileDirectiveHandler.java (78%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/directives/ImportFileRelativeDirectiveHandler.java (77%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/javacc/.gitignore (100%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/javacc/JavaCCParser.jj (93%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/javacc/JavaCCParserBase.java (89%) rename 
{vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/javacc/SubParserFactory.java (87%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/DirectiveArgumentTest.java (94%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/DirectiveHandlerTest.java (96%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/EntityTest.java (90%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/ParserConfigurationTest.java (90%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/ParserTestUtils.java (83%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/RuleParserConfigurableLiteralTest.java (95%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/RuleParserDataSourceTest.java (90%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/RuleParserParseFactTest.java (80%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/RuleParserTest.java (95%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/javacc/JavaCCParserBaseTest.java (89%) rename {vlog4j-parser => rulewerk-parser}/src/test/resources/base.rls (100%) rename {vlog4j-parser => rulewerk-parser}/src/test/resources/blank.rls (100%) rename {vlog4j-parser => rulewerk-parser}/src/test/resources/facts.rls (100%) rename {vlog4j-rdf => rulewerk-rdf}/LICENSE.txt (100%) rename {vlog4j-rdf => rulewerk-rdf}/pom.xml (88%) rename {vlog4j-rdf/src/main/java/org/semanticweb/vlog4j => rulewerk-rdf/src/main/java/org/semanticweb/rulewerk}/rdf/RdfModelConverter.java (89%) rename {vlog4j-rdf/src/main/java/org/semanticweb/vlog4j => rulewerk-rdf/src/main/java/org/semanticweb/rulewerk}/rdf/RdfValueToTermConverter.java (81%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/collections.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/escapedCharacters.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/exampleFacts.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/labelledBNodes.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/languageTags.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/literalValues.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/relativeURIs.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/unlabelledBNodes.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/unnormalizedLiteralValues.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/output/.keep (100%) rename {vlog4j-rdf/src/test/java/org/semanticweb/vlog4j => rulewerk-rdf/src/test/java/org/semanticweb/rulewerk}/rdf/RdfTestUtils.java (90%) rename {vlog4j-rdf/src/test/java/org/semanticweb/vlog4j => rulewerk-rdf/src/test/java/org/semanticweb/rulewerk}/rdf/TestConvertRdfFileToFacts.java (94%) rename {vlog4j-rdf/src/test/java/org/semanticweb/vlog4j => 
rulewerk-rdf/src/test/java/org/semanticweb/rulewerk}/rdf/TestReasonOverRdfFacts.java (85%) diff --git a/.gitignore b/.gitignore index 15e338099..8454baf10 100644 --- a/.gitignore +++ b/.gitignore @@ -32,7 +32,7 @@ target/ # Don't apply the above to src/ where Java requires # subdirectories named according to package names. # We do not want to forbid things like "dumpfiles" in -# package names. +# package names. !src/ # Use as directory for local testing code @@ -49,9 +49,9 @@ Thumbs.db # Output of tests and examples *.log -vlog4j-core/src/test/data/output/* -vlog4j-examples/src/main/data/output/* -vlog4j-examples/src/main/data/logs/* -vlog4j-rdf/src/main/data/output/* +rulewerk-core/src/test/data/output/* +rulewerk-examples/src/main/data/output/* +rulewerk-examples/src/main/data/logs/* +rulewerk-rdf/src/main/data/output/* /build-vlog/vlog/ /TAGS diff --git a/LICENSE.txt b/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
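The appendix above explains how the boilerplate notice is attached to individual files. In this code base the same notice appears as a generated Java header comment delimited by `#%L`/`#L%` markers (the style of the license-maven-plugin referenced in the root pom). A representative header, assembled from the module name and copyright line that appear in the file diffs below, looks roughly like this:

```java
/*-
 * #%L
 * Rulewerk Core Components
 * %%
 * Copyright (C) 2018 - 2019 Rulewerk Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
```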
diff --git a/README.md b/README.md index dbb5a647b..14a23a5e3 100644 --- a/README.md +++ b/README.md @@ -1,36 +1,36 @@ -VLog4j +Rulewerk ====== -[![Build Status](https://travis-ci.org/knowsys/vlog4j.png?branch=master)](https://travis-ci.org/knowsys/vlog4j) -[![Coverage Status](https://coveralls.io/repos/github/knowsys/vlog4j/badge.svg?branch=master)](https://coveralls.io/github/knowsys/vlog4j?branch=master) -[![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.vlog4j/vlog4j-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.vlog4j%22) +[![Build Status](https://travis-ci.org/knowsys/rulewerk.png?branch=master)](https://travis-ci.org/knowsys/rulewerk) +[![Coverage Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master) +[![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22) A Java library based on the [VLog rule engine](https://github.com/karmaresearch/vlog) Installation ------------ -The current release of VLog4j is version 0.5.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of Rulewerk is version 0.5.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` - org.semanticweb.vlog4j - vlog4j-core + org.semanticweb.rulewerk + rulewerk-core 0.5.0 ``` You need to use Java 1.8 or above. Available modules include: -* **vlog4j-core**: essential data models for rules and facts, and essential reasoner functionality -* **vlog4j-parser**: support for processing knowledge bases in [VLog4j syntax](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar) -* **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files -* **vlog4j-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) -* **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API -* **vlog4j-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/vlog4j/wiki/Standalone-client) for VLog4j. +* **rulewerk-core**: essential data models for rules and facts, and essential reasoner functionality +* **rulewerk-parser**: support for processing knowledge bases in [Rulewerk syntax](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar) +* **rulewerk-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files +* **rulewerk-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) +* **rulewerk-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API +* **rulewerk-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/rulewerk/wiki/Standalone-client) for Rulewerk. 
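To make the split between these modules concrete, here is a minimal sketch of how rulewerk-core and rulewerk-parser are typically combined. Only `RuleParser.parsePositiveLiteral(...)` and `Reasoner.countQueryAnswers(...).getCount()` are taken verbatim from the release notes further down; the other names (`new KnowledgeBase()`, `RuleParser.parseInto(...)`, `new VLogReasoner(kb)`, `reason()`, closing the reasoner via try-with-resources) are assumptions based on the imports visible in the client sources of this patch, and the toy rule text only approximates the Rulewerk syntax documented in the wiki linked above.

```java
import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner;
import org.semanticweb.rulewerk.parser.RuleParser;

public class GettingStartedSketch {
	public static void main(String[] args) throws Exception {
		// Hypothetical facts and rules, written in (approximate) Rulewerk syntax.
		String kbText = "parent(alice, bob) . parent(bob, carol) . "
				+ "ancestor(?X, ?Y) :- parent(?X, ?Y) . "
				+ "ancestor(?X, ?Z) :- parent(?X, ?Y), ancestor(?Y, ?Z) .";

		// rulewerk-parser: turn the text into a knowledge base (parseInto is assumed).
		KnowledgeBase kb = new KnowledgeBase();
		RuleParser.parseInto(kb, kbText);

		// rulewerk-core: materialise all consequences, then count query answers
		// as described in the release notes.
		try (Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.reason();
			PositiveLiteral query = RuleParser.parsePositiveLiteral("ancestor(?X, ?Y)");
			System.out.println(reasoner.countQueryAnswers(query).getCount());
		}
	}
}
```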
-The released packages use vlog4j-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use rulewerk-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: -* Run [build-vlog-library.sh](https://github.com/knowsys/vlog4j/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./vlog4j-core/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog4j-base. +* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-core/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of rulewerk-base. * Run ```mvn install``` to test if the setup works @@ -38,10 +38,10 @@ The released packages use vlog4j-base, which packages system-dependent binaries Documentation ------------- -* The module **vlog4j-examples** includes short example programs that demonstrate various features and use cases -* The GitHub project **[VLog4j Example](https://github.com/knowsys/vlog4j-example)** shows how to use VLog4j in own Maven projects and can be used as a skeleton for own projects -* [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. -* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online, with detailed information about vlog4j usage, the supported rule language [examples](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-by-examples) and [grammar](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar), and related publications. +* The module **rulewerk-examples** includes short example programs that demonstrate various features and use cases +* The GitHub project **[Rulewerk Example](https://github.com/knowsys/rulewerk-example)** shows how to use Rulewerk in own Maven projects and can be used as a skeleton for own projects +* [JavaDoc](https://knowsys.github.io/rulewerk/) is available online and through the Maven packages. +* A Rulewerk [Wiki](https://github.com/knowsys/rulewerk/wiki) is available online, with detailed information about rulewerk usage, the supported rule language [examples](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-by-examples) and [grammar](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar), and related publications. Development ----------- @@ -49,5 +49,5 @@ Development * Pull requests are welcome. * The master branch may require a development version of VLog. 
Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). -* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. +* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 6fb2310fa..5d2119244 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,10 +1,11 @@ -VLog4j Release Notes +Rulewerk Release Notes ==================== -VLog4j v0.6.0 +Rulewerk v0.6.0 ------------- Breaking changes: +* VLog4j is now called Rulewerk. * In the example package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no longer exist. It can be replaced by `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` diff --git a/build-vlog-library.sh b/build-vlog-library.sh index 7008b2fdd..02ecd116f 100755 --- a/build-vlog-library.sh +++ b/build-vlog-library.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Script to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar on Unix-like systems +# Script to build unreleased snapshots of karmaresearch/vlog into rulewerk-base jar on Unix-like systems if [ -f "./local_builds/jvlog.jar" ] then @@ -21,6 +21,6 @@ else cd ../../.. 
fi -mkdir local_builds/jvlog.jar vlog4j-core/lib -cp local_builds/jvlog.jar vlog4j-core/lib/jvlog-local.jar +mkdir local_builds/jvlog.jar rulewerk-core/lib +cp local_builds/jvlog.jar rulewerk-core/lib/jvlog-local.jar mvn initialize -Pdevelopment diff --git a/coverage/pom.xml b/coverage/pom.xml index 16e01e7f8..977046572 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -4,8 +4,8 @@ 4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT @@ -14,33 +14,33 @@ coverage - org.semanticweb.vlog4j - vlog4j-core + org.semanticweb.rulewerk + rulewerk-core 0.6.0-SNAPSHOT - org.semanticweb.vlog4j - vlog4j-rdf + org.semanticweb.rulewerk + rulewerk-rdf 0.6.0-SNAPSHOT - org.semanticweb.vlog4j - vlog4j-owlapi + org.semanticweb.rulewerk + rulewerk-owlapi 0.6.0-SNAPSHOT - org.semanticweb.vlog4j - vlog4j-graal + org.semanticweb.rulewerk + rulewerk-graal 0.6.0-SNAPSHOT - org.semanticweb.vlog4j - vlog4j-parser + org.semanticweb.rulewerk + rulewerk-parser 0.6.0-SNAPSHOT - org.semanticweb.vlog4j - vlog4j-client + org.semanticweb.rulewerk + rulewerk-client 0.6.0-SNAPSHOT diff --git a/pom.xml b/pom.xml index ed6f23606..8ec79cb32 100644 --- a/pom.xml +++ b/pom.xml @@ -5,25 +5,25 @@ 4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT pom - VLog4j + Rulewerk A Java library for working with the VLog rule engine - https://github.com/knowsys/vlog4j + https://github.com/knowsys/rulewerk - - vlog4j-core - vlog4j-rdf - vlog4j-examples - vlog4j-owlapi - vlog4j-graal - vlog4j-parser - vlog4j-client + rulewerk-core + rulewerk-rdf + rulewerk-examples + rulewerk-owlapi + rulewerk-graal + rulewerk-parser + rulewerk-client coverage @@ -38,7 +38,7 @@ - VLog4j Developers + Rulewerk Developers @@ -57,6 +57,11 @@ David Carral david.carral@tu-dresden.de + + maximilian + Maximilian Marx + maximilian.marx@tu-dresden.de + @@ -144,7 +149,7 @@ - org.eclipse.m2e lifecycle-mapping @@ -220,7 +225,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -280,8 +285,8 @@ test - ${project.reporting.outputDirectory}/jacoco-ut @@ -289,7 +294,7 @@ - **/javacc/JavaCCParser.class **/javacc/JavaCCParserConstants.class @@ -303,14 +308,14 @@ - org.apache.maven.plugins maven-javadoc-plugin ${maven.javadoc.version} 1.8 - VLog4j homepage]]> + Rulewerk homepage]]> @@ -323,7 +328,7 @@ Publishing javadoc for ${project.artifactId}:${project.version} ${project.reporting.outputDirectory}/apidocs - scm:git:https://github.com/knowsys/vlog4j.git + scm:git:https://github.com/knowsys/rulewerk.git gh-pages @@ -411,9 +416,9 @@ - https://github.com/knowsys/vlog4j.git - scm:git:https://github.com/knowsys/vlog4j.git - scm:git:https://github.com/knowsys/vlog4j.git + https://github.com/knowsys/rulewerk.git + scm:git:https://github.com/knowsys/rulewerk.git + scm:git:https://github.com/knowsys/rulewerk.git diff --git a/vlog4j-client/LICENSE.txt b/rulewerk-client/LICENSE.txt similarity index 100% rename from vlog4j-client/LICENSE.txt rename to rulewerk-client/LICENSE.txt diff --git a/vlog4j-client/pom.xml b/rulewerk-client/pom.xml similarity index 82% rename from vlog4j-client/pom.xml rename to rulewerk-client/pom.xml index 0e7b14f00..cc051d591 100644 --- a/vlog4j-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -5,16 +5,16 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> 4.0.0 - 
org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT - vlog4j-client + rulewerk-client jar - VLog4j Client - Stand-alone VLog4j application + Rulewerk Client + Stand-alone Rulewerk application UTF-8 @@ -23,15 +23,15 @@ ${project.groupId} - vlog4j-core + rulewerk-core ${project.version} ${project.groupId} - vlog4j-parser + rulewerk-parser ${project.version} - + org.slf4j slf4j-log4j12 ${slf4j.version} @@ -63,11 +63,11 @@ shade - standalone-vlog4j-client-${project.version} + standalone-rulewerk-client-${project.version} - org.semanticweb.vlog4j.client.picocli.VLog4jClient + org.semanticweb.rulewerk.client.picocli.RulewerkClient diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java similarity index 92% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java index c0f81099c..4984fba5e 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L @@ -26,9 +26,9 @@ import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; /** * Utility class for interacting with the vlog4j client. @@ -56,7 +56,7 @@ private ClientUtils() { * * Note: The VLog C++ backend performs its own logging. The log-level for this * can be configured using - * {@link Reasoner#setLogLevel(org.semanticweb.vlog4j.core.reasoner.LogLevel)}. + * {@link Reasoner#setLogLevel(org.semanticweb.rulewerk.core.reasoner.LogLevel)}. * It is also possible to specify a separate log file for this part of the logs. 
*/ public static void configureLogging() { diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java similarity index 98% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java index 1fb824007..935dde8fc 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java similarity index 98% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java index 5d8fd08a7..1af92ac6a 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java similarity index 98% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java index b694cb532..5e593b00c 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java similarity index 96% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java index a0535ec79..8663d80d4 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java similarity index 93% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java index 44969e879..52bc1e777 100644 --- 
a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L @@ -26,14 +26,14 @@ import java.util.ArrayList; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; import picocli.CommandLine.ArgGroup; import picocli.CommandLine.Command; import picocli.CommandLine.Option; diff --git a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java similarity index 96% rename from vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java rename to rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java index 167b66fb4..6cf1df352 100644 --- a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2019 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
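The renamed client classes above divide the work as follows: ClientUtils configures logging for the client (its javadoc also points to `Reasoner#setLogLevel` for the VLog C++ backend) and works with query results, while the materialize command (see the imports of VLog4jClientMaterialize) parses the knowledge base and drives the reasoner. Below is a rough, non-authoritative sketch of the query-printing side: only `Reasoner#setLogLevel(LogLevel)` comes from the ClientUtils javadoc shown earlier, whereas `answerQuery(query, true)`, `QueryResult#getTerms()`, the `LogLevel.WARNING` constant, and the closeable result iterator are assumptions about the API.

```java
import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
import org.semanticweb.rulewerk.core.reasoner.LogLevel;
import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.parser.RuleParser;

public class ClientQuerySketch {

	// Prints all answers to a query over an already materialised reasoner.
	static void printAnswers(Reasoner reasoner, String queryString) throws Exception {
		// Quieten the VLog backend's own log via the hook named in the ClientUtils
		// javadoc; WARNING is an assumed enum constant.
		reasoner.setLogLevel(LogLevel.WARNING);

		PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString);
		// answerQuery(query, includeBlanks) returning a closeable iterator is assumed.
		try (QueryResultIterator answers = reasoner.answerQuery(query, true)) {
			while (answers.hasNext()) {
				System.out.println(answers.next().getTerms());
			}
		}
	}
}
```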
diff --git a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java similarity index 99% rename from vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java rename to rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java index 17074eb37..7a4b65532 100644 --- a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; diff --git a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java similarity index 99% rename from vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java rename to rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java index 124511f5d..2ef16cb40 100644 --- a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; diff --git a/vlog4j-core/LICENSE.txt b/rulewerk-core/LICENSE.txt similarity index 100% rename from vlog4j-core/LICENSE.txt rename to rulewerk-core/LICENSE.txt diff --git a/vlog4j-core/pom.xml b/rulewerk-core/pom.xml similarity index 79% rename from vlog4j-core/pom.xml rename to rulewerk-core/pom.xml index 3c51c676d..991c14904 100644 --- a/vlog4j-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -1,65 +1,65 @@ - - - 4.0.0 - - - org.semanticweb.vlog4j - vlog4j-parent - 0.6.0-SNAPSHOT - - - vlog4j-core - jar - - VLog4j Core Components - Core components of VLog4j: reasoner and model - - - 1.3.3-snapshot - - - - - - ${project.groupId} - vlog4j-base - ${karmaresearch.vlog.version} - - - - - - development - - - - - - org.apache.maven.plugins - maven-install-plugin - 2.4 - - - initialize - - install-file - - - ${project.groupId} - vlog4j-base - ${karmaresearch.vlog.version} - jar - ./lib/jvlog-local.jar - - - - - - - - - + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.6.0-SNAPSHOT + + + rulewerk-core + jar + + Rulewerk Core Components + Core components of Rulewerk: reasoner and model + + + 1.3.3-snapshot + + + + + + ${project.groupId} + rulewerk-base + ${karmaresearch.vlog.version} + + + + + + development + + + + + + org.apache.maven.plugins + maven-install-plugin + 2.4 + + + initialize + + install-file + + + ${project.groupId} + rulewerk-base + ${karmaresearch.vlog.version} + jar + ./lib/jvlog-local.jar + + + + + + + + + diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java similarity index 89% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java rename to 
rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java index db42ff58c..a274e91cf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.exceptions; +package org.semanticweb.rulewerk.core.exceptions; /*- * #%L @@ -22,8 +22,8 @@ import java.text.MessageFormat; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Predicate; /** * Expression thrown when attempting to load facts for a {@link Predicate} from diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java index 6424a028b..afd7ec5bd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.exceptions; +package org.semanticweb.rulewerk.core.exceptions; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java index 7d26dbd47..bd28395dc 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java @@ -18,11 +18,11 @@ * #L% */ -package org.semanticweb.vlog4j.core.exceptions; +package org.semanticweb.rulewerk.core.exceptions; import java.text.MessageFormat; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.reasoner.ReasonerState; /** * Thrown when an operation that is invalid in current reasoner state is diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java index c87c6ca3b..7d848760a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.exceptions; +package org.semanticweb.rulewerk.core.exceptions; /*- * #%L diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jRuntimeException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jRuntimeException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java index 56fd985a6..d0adc72e8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jRuntimeException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.exceptions; +package org.semanticweb.rulewerk.core.exceptions; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java index 41f7bd3ce..9ce72ce46 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for abstract constants, i.e. 
for constants that represent an diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java index 550d3fa4e..8251cc869 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for representing conjunctions of {@link Literal}s, i.e., lists of diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Constant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Constant.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java index 850ae6f58..0a9c431cd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Constant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java index 888d30f77..83390c1c7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java index 954574e1f..398e7811a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -1,6 +1,6 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java index d4dfe19e8..fb75afea7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for datatype constants, i.e. for constants that represent a diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java index d5fd0306e..541f0c598 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java index 6952de6c3..1ca388565 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for existentially quantified variables, i.e., variables that appear diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java similarity index 89% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java index 36e7c1fef..ff082b2eb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java @@ -1,6 +1,6 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /*- * #%L diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java index 0f1296d0f..e10e49fbb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for string constants with a language tag, used to represent values diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java index 791615697..663ecf4a8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for literals. A positive literal is simply an atomic formula, i.e., diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java index 297692483..921efca58 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for {@link TermType#NAMED_NULL} terms. 
A blank is an entity used to diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NegativeLiteral.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NegativeLiteral.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java index 1321d77c7..90caac997 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NegativeLiteral.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PositiveLiteral.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PositiveLiteral.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java index 90c5f770f..66fa04ad2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PositiveLiteral.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java index c7b92761b..0a0dc5808 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * A Predicate represents a relation between terms. 
Is uniquely identified by diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 874889211..eb1549f4f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.Map.Entry; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; /** * Registry that manages prefixes and base namespace declarations as used for diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/QueryResult.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/QueryResult.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java index 0ea7e1f8e..f3d1cc6d7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/QueryResult.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java index 804524b80..8f7b4ee33 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java @@ -1,6 +1,6 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java index 464397b18..69c6f83c8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java index fc556f18e..ced6c05dc 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/SyntaxObject.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java similarity index 98% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/SyntaxObject.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java index f41891f8c..73dcafc12 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/SyntaxObject.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java index fb52ad009..0c631d653 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermType.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermType.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java index 94ca40a9f..9453cb25c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermType.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermVisitor.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java index 7ada2c0d4..1dad479be 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Terms.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java similarity index 98% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Terms.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java index c825c4179..2baf7355e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Terms.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java index d152a3f73..12bc6ab19 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for universally quantified variables, i.e., variables that appear diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java index 308843bdf..69210b5d0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java similarity index 87% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java index 0820e16de..699a96d41 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java @@ -1,7 +1,7 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; /* * #%L diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java index 5e2d141a3..ba4290138 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -27,9 +27,9 @@ import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; /** * Implements {@link Literal} objects. A literal is a formula of the form diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index d41bfabfb..2f58af65b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Map.Entry; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; /** * Implementation of the common logic for prefix declaration registries. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractTermImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractTermImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java index 0ec6e488f..c8040af01 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractTermImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java @@ -1,7 +1,7 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Term; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java similarity index 89% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java index 8167f43c2..8f24855d3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -26,9 +26,9 @@ import java.util.stream.Stream; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Term; /** * Simple implementation of {@link Conjunction}. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java similarity index 87% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java index 2a905dcbb..1fcb6bd68 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -21,10 +21,10 @@ */ import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; /** * Basic implementation for {@link DataSourceDeclaration}. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java index a366f7fed..6f42312c0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import org.apache.commons.lang3.Validate; @@ -22,8 +22,8 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; /** * Simple implementation of {@link DatatypeConstant}. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java similarity index 86% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java index 685d273a9..953d92c31 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -20,8 +20,8 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.TermVisitor; -import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; /** * Simple implementation of {@link ExistentialVariable}. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java index 2ffbfcf28..fcb3e01eb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import java.util.ArrayList; @@ -25,19 +25,19 @@ import java.util.Arrays; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; -import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import 
org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; /** * This utilities class provides static methods for creating terms and formulas diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java similarity index 81% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java index e1712dd37..60ee41579 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -22,10 +22,10 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.Term; /** * Standard implementation of the {@link Fact} interface. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java index 8d0bb26f3..9a112ba09 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -21,8 +21,8 @@ */ import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; /** * Simple implementation of {@link LanguageStringConstant}. 
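The hunks above move the model API (Term, Literal, Rule, Fact, ...) and its implementations, including the Expressions factory class, from org.semanticweb.vlog4j.core.model to org.semanticweb.rulewerk.core.model. As far as these hunks show, only the package prefix changes; class and member names stay the same. A minimal, hedged sketch of client code against the renamed packages follows; the Expressions factory methods used here (makeUniversalVariable, makePositiveLiteral, makeRule, makeFact, makeAbstractConstant) are assumptions taken from the project's examples module, not content of this patch.

```java
// Hypothetical client code, not part of this patch: it only illustrates the
// renamed package prefix. The Expressions factory methods are assumed from
// the project's examples module.
import org.semanticweb.rulewerk.core.model.api.Fact;
import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
import org.semanticweb.rulewerk.core.model.api.Rule;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;

public class RenamedModelApiSketch {
	public static void main(String[] args) {
		// ancestor(?x, ?y) :- parent(?x, ?y) .
		PositiveLiteral head = Expressions.makePositiveLiteral("ancestor",
				Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y"));
		PositiveLiteral body = Expressions.makePositiveLiteral("parent",
				Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y"));
		Rule rule = Expressions.makeRule(head, body);

		// parent(alice, bob) .
		Fact fact = Expressions.makeFact("parent",
				Expressions.makeAbstractConstant("alice"), Expressions.makeAbstractConstant("bob"));

		System.out.println(rule);
		System.out.println(fact);
	}
}
```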
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index bf1d14f8d..a63f73950 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -27,7 +27,7 @@ import java.util.Map; import java.util.Map.Entry; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; /** * Implementation of {@link PrefixDeclarationRegistry} that is suitable for diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java similarity index 88% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java index 5b3a0adc6..ff41632ae 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /* * #%L @@ -20,8 +20,8 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; /** * Implements {@link NamedNull} terms. 
A null is an entity used to represent diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NegativeLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java similarity index 80% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NegativeLiteralImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java index ceb0a145c..740e8af97 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NegativeLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -23,9 +23,9 @@ import java.util.List; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; public class NegativeLiteralImpl extends AbstractLiteralImpl implements NegativeLiteral { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PositiveLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java similarity index 80% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PositiveLiteralImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java index 84c2899e5..83eb4e4e0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PositiveLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -23,9 +23,9 @@ import java.util.List; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; public class PositiveLiteralImpl extends AbstractLiteralImpl implements PositiveLiteral { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index 38fac8686..25e9d3c21 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -1,4 +1,4 @@ -package 
org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -22,7 +22,7 @@ import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Predicate; /** * Implementation for {@link Predicate}. Supports predicates of arity 1 or diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java similarity index 84% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java index ef05b14d2..d54bf8512 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import java.util.UUID; @@ -22,8 +22,8 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; /** * A {@link NamedNull} term that has been renamed during parsing. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java similarity index 88% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java index 4ffbae0de..acd038dc0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import java.util.Set; import java.util.stream.Collectors; @@ -26,13 +26,13 @@ import java.util.stream.Stream; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; /** * Implementation for {@link Rule}. 
Represents rules with non-empty heads and diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 2d10dc4a5..6ee191e48 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -1,11 +1,11 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import java.util.List; import java.util.Map.Entry; import java.util.function.Function; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; /*- * #%L @@ -27,25 +27,25 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; -import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import 
org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; /** * A utility class with static methods to obtain the correct parsable string diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java similarity index 86% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java index e21cf3e9c..1211841b4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java @@ -1,7 +1,7 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/AcyclicityNotion.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/AcyclicityNotion.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java index 0731a8838..36676c4ae 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/AcyclicityNotion.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Algorithm.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Algorithm.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java index 8952a039c..ea3994d5a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Algorithm.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java index 93028cca9..1c6d077bd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java @@ -1,4 +1,4 @@ -package 
org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/CyclicityResult.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/CyclicityResult.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java index 0ce1fca55..2e289278b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/CyclicityResult.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index e7b0e8a77..90cc3bb74 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L @@ -37,17 +37,17 @@ import java.util.Set; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.exceptions.VLog4jException; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.VLog4jException; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; /** * A knowledge base with rules, facts, and declarations for loading data from @@ -449,10 +449,10 @@ Map> getFactsByPredicate() { * KnowledgeBase. 
* * This is essentially - * {@link org.semanticweb.vlog4j.parser.RuleParser#parseInto}, but we need to + * {@link org.semanticweb.rulewerk.parser.RuleParser#parseInto}, but we need to * avoid a circular dependency here -- this is also why we throw * {@link VLog4jException} instead of - * {@link org.semanticweb.vlog4j.parser.ParsingException}. + * {@link org.semanticweb.rulewerk.parser.ParsingException}. */ @FunctionalInterface public interface AdditionalInputParser { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java index ddbd4fa7a..127504d46 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.Statement; /** * Listener to {@link KnowledgeBase} content change events. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/LogLevel.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/LogLevel.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java index 9b9ca1e1d..fa1a54d45 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/LogLevel.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java similarity index 98% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java index 3438c5e34..56cf95bcf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java index 743497b26..99d08f05e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L @@ -22,7 +22,7 @@ import java.util.Iterator; -import org.semanticweb.vlog4j.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.QueryResult; /** * Iterator for {@link QueryResult}s. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 444d4c615..04138ef5a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -1,20 +1,20 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java index 5a30d7359..bf22ef019 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/RuleRewriteStrategy.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/RuleRewriteStrategy.java rename to 
rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java index 142d2ea03..3aecb060a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/RuleRewriteStrategy.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index 7998dd466..fee712e49 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -25,7 +25,7 @@ import java.util.Arrays; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * An {@code CsvFileDataSource} stores facts in the CSV format inside a file of diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java similarity index 82% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java index 1d1cd1575..9c48bbb10 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -20,9 +20,9 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; /** * Iterator that represents an empty query result. 
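The data-source classes (CsvFileDataSource above, and FileDataSource, InMemoryDataSource, RdfFileDataSource, SparqlQueryResultDataSource in the following hunks) move to org.semanticweb.rulewerk.core.reasoner.implementation in the same way. A hedged sketch of binding a CSV file to a predicate under the renamed packages; the File-based CsvFileDataSource constructor and the DataSourceDeclarationImpl(Predicate, DataSource) constructor follow the pre-rename VLog4j API and should be treated as assumptions.

```java
// Illustrative only: constructor forms are assumptions based on the
// pre-rename VLog4j API, not content of this patch.
import java.io.File;

import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
import org.semanticweb.rulewerk.core.model.api.Predicate;
import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;

public class DataSourceSketch {
	public static void main(String[] args) throws Exception {
		// bind the binary predicate "parent" to facts stored in a CSV file
		Predicate parent = Expressions.makePredicate("parent", 2);
		DataSourceDeclaration declaration = new DataSourceDeclarationImpl(parent,
				new CsvFileDataSource(new File("parents.csv")));

		KnowledgeBase kb = new KnowledgeBase();
		kb.addStatement(declaration);
	}
}
```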
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java similarity index 98% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index 8c5fb7e21..b0761e238 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java index c628cd023..13a7066a8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -22,8 +22,8 @@ import java.util.Arrays; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Fact; /** * A {@link DataSource} for representing a large number of facts that were diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java similarity index 87% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java index 25d089fee..1f2f943ee 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -23,19 +23,19 @@ import java.util.Collection; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; 
+import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; /** * Utility class with static methods for converting from VLog API model objects - * ({@code org.semanticweb.vlog4j.core.model}) to internal VLog model objects + * ({@code org.semanticweb.rulewerk.core.model}) to internal VLog model objects * ({@code karmaresearch.vlog}). * * @author Irina Dragoste diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java similarity index 88% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java index 68deeb6f0..27814ab4e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java @@ -1,7 +1,7 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index b02bf4153..6727fd558 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /* * #%L @@ -22,8 +22,8 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; /** * Implements {@link QueryResult}s. 
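With the KnowledgeBase, Reasoner, QueryResultIterator and QueryResult hunks above all moved to org.semanticweb.rulewerk.core.reasoner and its implementation package, a hedged sketch of a query round-trip against the renamed packages may help orientation. The VLogReasoner(KnowledgeBase) constructor and the reason(), answerQuery(..., true), and getTerms() calls follow the pre-rename VLog4j API and are assumptions here, not part of this patch.

```java
// Illustrative only: API usage is assumed from the pre-rename VLog4j examples.
import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner;

public class QuerySketch {
	public static void main(String[] args) throws Exception {
		KnowledgeBase kb = new KnowledgeBase();
		kb.addStatements(
				Expressions.makeFact("parent", Expressions.makeAbstractConstant("alice"),
						Expressions.makeAbstractConstant("bob")),
				Expressions.makeRule(
						Expressions.makePositiveLiteral("ancestor",
								Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")),
						Expressions.makePositiveLiteral("parent",
								Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y"))));

		// the Reasoner is AutoCloseable; materialise, then query
		try (Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.reason();
			PositiveLiteral query = Expressions.makePositiveLiteral("ancestor",
					Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y"));
			try (QueryResultIterator answers = reasoner.answerQuery(query, true)) {
				answers.forEachRemaining(answer -> System.out.println(answer.getTerms()));
			}
		}
	}
}
```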
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index eb3ce09ea..43e1c44b1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -25,7 +25,7 @@ import java.util.Arrays; import java.util.Optional; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * An {@code RdfFileDataSource} stores facts in the RDF N-Triples format inside diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index 2a6269ebb..ddde0498a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -24,8 +24,8 @@ import java.io.IOException; import java.util.UUID; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.implementation.RenamedNamedNull; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; /** * A class that implements skolemization of named null names. 
The same diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index 89db26939..b105f8d82 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -27,8 +27,8 @@ import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java similarity index 84% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index 400943c75..b83cc7a12 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -20,15 +20,15 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; -import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; -import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; /** * A visitor that converts {@link Term}s of different types to corresponding diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java index 0cab0e979..d03b1b118 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSource; /** * Abstract base class for VLog-specific data sources. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java similarity index 89% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java index e121399a9..c66b3094d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -30,22 +30,22 @@ import java.util.Map.Entry; import java.util.Set; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import 
org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; /** * Class for organizing a Knowledge Base using vLog-specific data structures. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java similarity index 87% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java index 043aea636..835fe699a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /* * #%L @@ -20,9 +20,9 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java index cdec467b5..a6b48b5bf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import java.io.IOException; import java.io.OutputStream; @@ -10,30 +10,30 @@ import java.util.Set; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; 
-import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; -import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.AcyclicityNotion; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.CyclicityResult; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java similarity index 86% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java index 3be4fdebf..3864b4fb7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /* * #%L @@ -23,19 +23,19 @@ import java.util.ArrayList; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import 
org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Utility class with static methods for converting from VLog internal model * ({@code karmaresearch.vlog} objects) to VLog API model - * ({@code org.semanticweb.vlog4j.core.model.api}) objects. + * ({@code org.semanticweb.rulewerk.core.model.api}) objects. * * @author Irina Dragoste * diff --git a/vlog4j-core/src/test/data/input/binaryFacts.csv b/rulewerk-core/src/test/data/input/binaryFacts.csv similarity index 100% rename from vlog4j-core/src/test/data/input/binaryFacts.csv rename to rulewerk-core/src/test/data/input/binaryFacts.csv diff --git a/vlog4j-core/src/test/data/input/constantD.csv b/rulewerk-core/src/test/data/input/constantD.csv similarity index 100% rename from vlog4j-core/src/test/data/input/constantD.csv rename to rulewerk-core/src/test/data/input/constantD.csv diff --git a/vlog4j-core/src/test/data/input/empty.csv b/rulewerk-core/src/test/data/input/empty.csv similarity index 100% rename from vlog4j-core/src/test/data/input/empty.csv rename to rulewerk-core/src/test/data/input/empty.csv diff --git a/vlog4j-core/src/test/data/input/invalidFormatNtFacts.nt b/rulewerk-core/src/test/data/input/invalidFormatNtFacts.nt similarity index 100% rename from vlog4j-core/src/test/data/input/invalidFormatNtFacts.nt rename to rulewerk-core/src/test/data/input/invalidFormatNtFacts.nt diff --git a/vlog4j-core/src/test/data/input/ternaryFacts.nt b/rulewerk-core/src/test/data/input/ternaryFacts.nt similarity index 100% rename from vlog4j-core/src/test/data/input/ternaryFacts.nt rename to rulewerk-core/src/test/data/input/ternaryFacts.nt diff --git a/vlog4j-core/src/test/data/input/ternaryFactsZipped.nt.gz b/rulewerk-core/src/test/data/input/ternaryFactsZipped.nt.gz similarity index 100% rename from vlog4j-core/src/test/data/input/ternaryFactsZipped.nt.gz rename to rulewerk-core/src/test/data/input/ternaryFactsZipped.nt.gz diff --git a/vlog4j-core/src/test/data/input/unaryFacts.csv b/rulewerk-core/src/test/data/input/unaryFacts.csv similarity index 100% rename from vlog4j-core/src/test/data/input/unaryFacts.csv rename to rulewerk-core/src/test/data/input/unaryFacts.csv diff --git a/vlog4j-core/src/test/data/input/unaryFactsCD.csv b/rulewerk-core/src/test/data/input/unaryFactsCD.csv similarity index 100% rename from vlog4j-core/src/test/data/input/unaryFactsCD.csv rename to rulewerk-core/src/test/data/input/unaryFactsCD.csv diff --git a/vlog4j-core/src/test/data/input/unaryFactsZipped.csv.gz b/rulewerk-core/src/test/data/input/unaryFactsZipped.csv.gz similarity index 100% rename from 
vlog4j-core/src/test/data/input/unaryFactsZipped.csv.gz rename to rulewerk-core/src/test/data/input/unaryFactsZipped.csv.gz diff --git a/vlog4j-core/src/test/data/output/.keep b/rulewerk-core/src/test/data/output/.keep similarity index 100% rename from vlog4j-core/src/test/data/output/.keep rename to rulewerk-core/src/test/data/output/.keep diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java similarity index 94% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java index db8fdcf30..2e6dba525 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -29,14 +29,14 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class ConjunctionImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java similarity index 88% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index c02772192..880172977 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -30,15 +30,15 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; 
-import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class DataSourceDeclarationTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java similarity index 81% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java index 0de3182d3..70763c3a4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -24,12 +24,12 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.FactImpl; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.FactImpl; public class FactTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java similarity index 96% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 5aad3d706..c6202e864 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -24,9 +24,9 @@ import org.junit.Before; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; +import 
org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; public class MergingPrefixDeclarationRegistryTest { private MergingPrefixDeclarationRegistry prefixDeclarations; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java similarity index 89% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java index d22881e84..c97f71504 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -28,15 +28,15 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.NegativeLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.NegativeLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; public class NegativeLiteralImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java similarity index 89% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java index 7c8d791a7..265f096c9 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -26,15 +26,15 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import 
org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.NegativeLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.NegativeLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; public class PositiveLiteralImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java similarity index 89% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java index 508f4c90d..7c398b37b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -25,9 +25,9 @@ import static org.junit.Assert.assertNotEquals; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; public class PredicateImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java similarity index 90% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java index 0a406ec18..47c759252 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -27,17 +27,17 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import 
org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; public class RuleImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java similarity index 87% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java index 5093d5116..77f47b7b6 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -24,17 +24,17 @@ import static org.junit.Assert.assertNotEquals; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; -import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.ExistentialVariableImpl; -import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.ExistentialVariableImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; public class TermImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java similarity index 92% rename from 
vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index 8e0531a67..1305e8acc 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L @@ -27,10 +27,10 @@ import org.junit.Before; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; public class KnowledgeBaseTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java similarity index 92% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java index c08ad616c..6b51bfe7a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -34,13 +34,13 @@ import org.junit.BeforeClass; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; public class LoggingTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java similarity index 91% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java index 226aee845..8e64d2915 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -33,13 +33,13 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.rules.Timeout; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; /** * Test case ensuring {@link Reasoner#setReasoningTimeout(Integer)} works as diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java similarity index 94% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java index db5c356ae..5e073a77e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -32,16 +32,16 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import 
org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class AddDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java similarity index 94% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java index 40c21f29d..be9efb6c2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -36,19 +36,19 @@ import org.junit.Assert; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; public class AnswerQueryTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java similarity index 95% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java index 930fdf785..22cc04bbb 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSourceTest.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -26,7 +26,7 @@ import java.io.IOException; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; public class CsvFileDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java similarity index 89% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java index a2b1b8036..945c4482a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -33,13 +33,13 @@ import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVParser; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; /** * Utility class for reading from and writing to data source files. 
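The CsvFileDataSourceTest and FileDataSourceTestUtils hunks above cover the file-backed data sources under their new package names. For reference, a file source is still attached to a predicate through a data source declaration; the sketch below assumes the post-rename imports shown in these diffs, and the CsvFileDataSource constructor argument (a java.io.File here) is an assumption that may differ between releases.

```
import java.io.File;

import org.semanticweb.rulewerk.core.model.api.Predicate;
import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;

public class CsvDataSourceSketch {
	public static void main(String[] args) throws Exception {
		// A unary predicate whose facts come from one of the renamed test CSV files.
		Predicate predicate = Expressions.makePredicate("unaryFacts", 1);
		// NOTE: the constructor argument form (File vs. String path) is assumed here.
		CsvFileDataSource source = new CsvFileDataSource(
				new File("rulewerk-core/src/test/data/input/unaryFacts.csv"));

		KnowledgeBase kb = new KnowledgeBase();
		kb.addStatement(new DataSourceDeclarationImpl(predicate, source));
	}
}
```

Facts for the declared predicate are then materialized from the file when a reasoner loads the knowledge base.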
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java similarity index 89% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index cb173d5e4..1460a5a7f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -31,16 +31,16 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; public class GeneratedAnonymousIndividualsTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java similarity index 91% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java index 4ef5c7322..a8b773a8d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -30,17 +30,17 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import 
org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; public class ModelToVLogConverterTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java similarity index 95% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java index 4e17d8bdf..9c5a993e8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -25,16 +25,16 @@ import java.io.IOException; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; public class QueryAnswerCountTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java similarity index 96% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java index 3d8eea89b..af30f5a3f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -32,20 +32,20 @@ import java.util.Set; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class QueryAnsweringCorrectnessTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java similarity index 80% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java index 642f38985..0335dd845 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ 
-28,11 +28,11 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultImpl; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; public class QueryResultImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java similarity index 88% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java index 1ec594328..6bfbdf501 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -26,9 +26,9 @@ import java.util.List; import java.util.Set; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; /** * Utility class with static methods for collecting the results of a query for diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java similarity index 94% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java index ba238ae78..ba4730f83 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; @@ -26,7 +26,7 @@ import java.io.IOException; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; public class RdfFileDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java 
similarity index 95% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java index bebe220d2..da644f50d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -26,7 +26,7 @@ import org.junit.Before; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.NamedNull; public class SkolemizationTest { private Skolemization skolemization; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java similarity index 93% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java index bd950350d..fdc5ba789 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -30,8 +30,8 @@ import org.apache.commons.lang3.StringUtils; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class SparqlQueryResultDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java similarity index 80% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java index b4e3a5aec..d27cb9282 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -30,16 +30,16 @@ import java.util.Set; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; 
-import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; public class VLogReasonerBasics { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java similarity index 88% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java index 084c1b321..dae941489 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; @@ -32,17 +32,17 @@ import java.util.Set; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class VLogReasonerCombinedInputs { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java similarity index 85% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java index 2b7d85c58..a7d1f066a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -31,17 +31,17 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class VLogReasonerCsvInput { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java similarity index 93% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java index 7f1c62838..3c4e023f3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -31,12 +31,12 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import 
org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; public class VLogReasonerCsvOutput { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java similarity index 83% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java index 10c4226fa..213889d55 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -28,16 +28,16 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class VLogReasonerNegation { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java similarity index 90% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java index d278116a3..601e6f57b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -1,4 +1,4 @@ -package 
org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -32,13 +32,13 @@ import org.junit.Ignore; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class VLogReasonerRdfInput { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java similarity index 93% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java index 521812028..f5cb44aab 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; @@ -30,14 +30,14 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class VLogReasonerSparqlInput { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java similarity index 93% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java index 6fb85e3c8..a524ec507 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -34,22 +34,22 @@ import java.util.Set; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.ReasonerState; public class VLogReasonerStateTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java similarity index 85% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java index 6dff4c70f..dcaf16a5a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.*; import static org.mockito.Mockito.*; @@ -17,20 +17,20 @@ import org.junit.Before; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; /*- * #%L diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java similarity index 87% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java index 64dd2469d..db398d51d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -23,12 +23,12 @@ import static org.junit.Assert.assertTrue; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; -import 
org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; public class VLogToModelConverterTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java similarity index 94% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java index c1f4c8580..8c1f38594 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L @@ -27,7 +27,7 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/LargeAritiesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java similarity index 96% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/LargeAritiesTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java index 3f4a364db..5d9f00983 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/LargeAritiesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; import static org.junit.Assert.assertArrayEquals; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/StratifiedNegationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java similarity index 97% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/StratifiedNegationTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java index 052841090..b4b89ae14 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/StratifiedNegationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L @@ -25,7 +25,7 @@ import static org.junit.Assert.assertTrue; import 
org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.vlog.VLogExpressions; +import org.semanticweb.rulewerk.core.reasoner.vlog.VLogExpressions; import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java similarity index 97% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvFileTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java index a580baeb4..676dd79dd 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvFileTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L @@ -28,7 +28,7 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.Atom; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java similarity index 97% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java index cbe1d005b..1d9b7ca0d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /* * #%L diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java similarity index 96% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java index 7eaf1d6ab..622888073 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L @@ -28,7 +28,7 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.Atom; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogExpressions.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java 
similarity index 98% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogExpressions.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java index 5ddb466a7..d03bf4fac 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogExpressions.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java similarity index 97% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryResultUtils.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java index f2c4614ae..e61d46421 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryResultUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java similarity index 98% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java index 801c43e65..df38a52ab 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java similarity index 98% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java index 0124f6eb6..428e31226 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L diff --git a/vlog4j-examples/LICENSE.txt b/rulewerk-examples/LICENSE.txt similarity index 100% rename from vlog4j-examples/LICENSE.txt rename to rulewerk-examples/LICENSE.txt diff --git a/vlog4j-examples/README.md b/rulewerk-examples/README.md similarity index 100% rename from vlog4j-examples/README.md rename to rulewerk-examples/README.md diff --git a/vlog4j-examples/pom.xml b/rulewerk-examples/pom.xml similarity index 79% rename from vlog4j-examples/pom.xml rename to rulewerk-examples/pom.xml index 1e23eb09e..216564049 100644 --- a/vlog4j-examples/pom.xml +++ 
b/rulewerk-examples/pom.xml @@ -1,90 +1,89 @@ - - - - 4.0.0 - - - org.semanticweb.vlog4j - vlog4j-parent - 0.6.0-SNAPSHOT - - - vlog4j-examples - jar - - VLog4j Examples - Contains examples and usage instructions describing the basic functionality of VLog4j - - - - ${project.groupId} - vlog4j-core - ${project.version} - - - ${project.groupId} - vlog4j-owlapi - ${project.version} - - - ${project.groupId} - vlog4j-rdf - ${project.version} - - - ${project.groupId} - vlog4j-graal - ${project.version} - - - ${project.groupId} - vlog4j-parser - ${project.version} - - - org.slf4j - slf4j-log4j12 - ${slf4j.version} - - - - - org.openrdf.sesame - sesame-rio-turtle - - ${openrdf.sesame.version} - - - - - org.openrdf.sesame - sesame-rio-rdfxml - - ${openrdf.sesame.version} - - - - fr.lirmm.graphik - graal-io-dlgp - ${graal.version} - - - - - - - - - org.codehaus.mojo - cobertura-maven-plugin - 2.7 - - true - true - - - - - - + + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.6.0-SNAPSHOT + + + rulewerk-examples + jar + + Rulewerk Examples + Contains examples and usage instructions describing the basic functionality of Rulewerk + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-owlapi + ${project.version} + + + ${project.groupId} + rulewerk-rdf + ${project.version} + + + ${project.groupId} + rulewerk-graal + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + + + + + org.openrdf.sesame + sesame-rio-turtle + + ${openrdf.sesame.version} + + + + + org.openrdf.sesame + sesame-rio-rdfxml + + ${openrdf.sesame.version} + + + + fr.lirmm.graphik + graal-io-dlgp + ${graal.version} + + + + + + + + + org.codehaus.mojo + cobertura-maven-plugin + 2.7 + + true + true + + + + + diff --git a/vlog4j-examples/src/main/data/.gitignore b/rulewerk-examples/src/main/data/.gitignore similarity index 100% rename from vlog4j-examples/src/main/data/.gitignore rename to rulewerk-examples/src/main/data/.gitignore diff --git a/vlog4j-examples/src/main/data/input/bicycleEDB.csv.gz b/rulewerk-examples/src/main/data/input/bicycleEDB.csv.gz similarity index 100% rename from vlog4j-examples/src/main/data/input/bicycleEDB.csv.gz rename to rulewerk-examples/src/main/data/input/bicycleEDB.csv.gz diff --git a/vlog4j-examples/src/main/data/input/counting-triangles.rls b/rulewerk-examples/src/main/data/input/counting-triangles.rls similarity index 100% rename from vlog4j-examples/src/main/data/input/counting-triangles.rls rename to rulewerk-examples/src/main/data/input/counting-triangles.rls diff --git a/vlog4j-examples/src/main/data/input/doid.nt.gz b/rulewerk-examples/src/main/data/input/doid.nt.gz similarity index 100% rename from vlog4j-examples/src/main/data/input/doid.nt.gz rename to rulewerk-examples/src/main/data/input/doid.nt.gz diff --git a/vlog4j-examples/src/main/data/input/doid.rls b/rulewerk-examples/src/main/data/input/doid.rls similarity index 100% rename from vlog4j-examples/src/main/data/input/doid.rls rename to rulewerk-examples/src/main/data/input/doid.rls diff --git a/vlog4j-examples/src/main/data/input/graal/doid-example.dlgp b/rulewerk-examples/src/main/data/input/graal/doid-example.dlgp similarity index 100% rename from vlog4j-examples/src/main/data/input/graal/doid-example.dlgp rename to rulewerk-examples/src/main/data/input/graal/doid-example.dlgp diff --git a/vlog4j-examples/src/main/data/input/graal/example.dlgp 
b/rulewerk-examples/src/main/data/input/graal/example.dlgp similarity index 100% rename from vlog4j-examples/src/main/data/input/graal/example.dlgp rename to rulewerk-examples/src/main/data/input/graal/example.dlgp diff --git a/vlog4j-examples/src/main/data/input/hasPartEDB.csv.gz b/rulewerk-examples/src/main/data/input/hasPartEDB.csv.gz similarity index 100% rename from vlog4j-examples/src/main/data/input/hasPartEDB.csv.gz rename to rulewerk-examples/src/main/data/input/hasPartEDB.csv.gz diff --git a/vlog4j-examples/src/main/data/input/owl/bike.owl b/rulewerk-examples/src/main/data/input/owl/bike.owl similarity index 100% rename from vlog4j-examples/src/main/data/input/owl/bike.owl rename to rulewerk-examples/src/main/data/input/owl/bike.owl diff --git a/vlog4j-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf b/rulewerk-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf similarity index 100% rename from vlog4j-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf rename to rulewerk-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf diff --git a/vlog4j-examples/src/main/data/input/ternaryBicycleEDB.nt.gz b/rulewerk-examples/src/main/data/input/ternaryBicycleEDB.nt.gz similarity index 100% rename from vlog4j-examples/src/main/data/input/ternaryBicycleEDB.nt.gz rename to rulewerk-examples/src/main/data/input/ternaryBicycleEDB.nt.gz diff --git a/vlog4j-examples/src/main/data/input/wheelEDB.csv.gz b/rulewerk-examples/src/main/data/input/wheelEDB.csv.gz similarity index 100% rename from vlog4j-examples/src/main/data/input/wheelEDB.csv.gz rename to rulewerk-examples/src/main/data/input/wheelEDB.csv.gz diff --git a/vlog4j-examples/src/main/data/output/.keep b/rulewerk-examples/src/main/data/output/.keep similarity index 100% rename from vlog4j-examples/src/main/data/output/.keep rename to rulewerk-examples/src/main/data/output/.keep diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java similarity index 92% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java index 0e7d18b32..838a261f5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; /*- * #%L @@ -22,11 +22,11 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example shows how to integrate and compare the contents of two SPARQL diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java 
b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java similarity index 89% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java index a97e5438e..fede387b0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; import java.io.FileInputStream; @@ -24,10 +24,10 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * In this example we count the number of triangles in the reflexive diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java similarity index 86% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java index e919be73c..2f895847e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; /*- * #%L @@ -25,12 +25,12 @@ import java.util.Arrays; import java.util.List; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example reasons about human diseases, based on information from the diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java similarity index 87% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index f39fbd664..dafb60680 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; /*- * #%L @@ -28,15 
+28,15 @@ import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; public final class ExamplesUtils { @@ -61,7 +61,7 @@ private ExamplesUtils() { * * Note: The VLog C++ backend performs its own logging. The log-level for this * can be configured using - * {@link Reasoner#setLogLevel(org.semanticweb.vlog4j.core.reasoner.LogLevel)}. + * {@link Reasoner#setLogLevel(org.semanticweb.rulewerk.core.reasoner.LogLevel)}. * It is also possible to specify a separate log file for this part of the logs. */ public static void configureLogging() { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java similarity index 85% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java index 3a1702e91..6ea419e31 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; /*- * #%L @@ -22,14 +22,14 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import 
org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example shows how to reason efficiently with data sets generated in diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java similarity index 90% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java index b269047b7..9591dd3f0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; /*- * #%L @@ -22,11 +22,11 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example demonstrates the basic usage of VLog4j for rule reasoning. We diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java similarity index 87% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java index d493a3eb5..9fcb968a3 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.core; +package org.semanticweb.rulewerk.examples.core; /*- * #%L @@ -22,14 +22,14 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example shows how facts can be 
imported from files in the CSV format. diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java similarity index 86% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java index 3a2cd3f8f..8e974a814 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.core; +package org.semanticweb.rulewerk.examples.core; /*- * #%L @@ -22,15 +22,15 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example shows how facts can be imported from files in the RDF N-Triples diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java similarity index 86% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java index fef9cfb81..6f3a92d3f 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.core; +package org.semanticweb.rulewerk.examples.core; /*- * #%L @@ -26,20 +26,20 @@ import java.util.LinkedHashSet; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import 
org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.vlog4j.examples.ExamplesUtils; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.examples.ExamplesUtils; /** * This is a simple example of adding data from the result of a SPARQL query on diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java similarity index 92% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java index 4a429bedb..fdf14be9c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.core; +package org.semanticweb.rulewerk.examples.core; /*- * #%L @@ -23,11 +23,11 @@ import java.io.IOException; import org.eclipse.jdt.annotation.Nullable; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This class exemplifies setting a log file and log level for VLog reasoner diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java similarity index 88% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java index f9b46ff59..387893b9d 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ 
b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.core; +package org.semanticweb.rulewerk.examples.core; /*- * #%L @@ -22,13 +22,13 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example shows non-termination of the Skolem Chase, versus termination of diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java similarity index 93% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java index 07bfafa49..ccd12e5ec 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.graal; +package org.semanticweb.rulewerk.examples.graal; /*- * #%L @@ -27,11 +27,11 @@ import java.util.ArrayList; import java.util.List; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; -import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; +import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; import fr.lirmm.graphik.graal.api.core.Atom; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java similarity index 91% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java index 497acef27..55e498784 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.graal; +package 
org.semanticweb.rulewerk.examples.graal; /*- * #%L @@ -24,13 +24,13 @@ import java.util.ArrayList; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; -import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; +import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; import fr.lirmm.graphik.graal.io.dlp.DlgpParser; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java similarity index 84% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java index ecb80b742..9e7b7504e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.graal; +package org.semanticweb.rulewerk.examples.graal; /*- * #%L @@ -24,21 +24,21 @@ import java.io.IOException; import java.net.URL; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.DoidExample; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import 
org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.DoidExample; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; import fr.lirmm.graphik.graal.io.dlp.DlgpParser; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java similarity index 84% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java index f738449f4..67be04091 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.owlapi; +package org.semanticweb.rulewerk.examples.owlapi; /*- * #%L @@ -29,18 +29,18 @@ import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; /** * This example shows how vlog4j-owlapi library (class diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java similarity index 87% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java index cbb848562..d1aaa22e3 100644 --- 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.rdf; +package org.semanticweb.rulewerk.examples.rdf; /*- * #%L @@ -37,19 +37,19 @@ import org.openrdf.rio.RDFParser; import org.openrdf.rio.Rio; import org.openrdf.rio.helpers.StatementCollector; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; -import org.semanticweb.vlog4j.rdf.RdfModelConverter; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.rdf.RdfModelConverter; /** * This example shows how vlog4j-rdf library's utility class diff --git a/vlog4j-examples/src/main/logs/.keep b/rulewerk-examples/src/main/logs/.keep similarity index 100% rename from vlog4j-examples/src/main/logs/.keep rename to rulewerk-examples/src/main/logs/.keep diff --git a/vlog4j-graal/LICENSE.txt b/rulewerk-graal/LICENSE.txt similarity index 100% rename from vlog4j-graal/LICENSE.txt rename to rulewerk-graal/LICENSE.txt diff --git a/vlog4j-graal/pom.xml b/rulewerk-graal/pom.xml similarity index 80% rename from vlog4j-graal/pom.xml rename to rulewerk-graal/pom.xml index 7487cf100..f58afbd16 100644 --- a/vlog4j-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -4,15 +4,15 @@ 4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT - vlog4j-graal + rulewerk-graal jar - VLog4J Graal Import Components + Rulewerk Graal Import Components Components to import Graal data structures. 
@@ -29,7 +29,7 @@ ${project.groupId} - vlog4j-core + rulewerk-core ${project.version} diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConjunctiveQueryToRule.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java similarity index 90% rename from vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConjunctiveQueryToRule.java rename to rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java index a740f6274..773bed754 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConjunctiveQueryToRule.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.graal; +package org.semanticweb.rulewerk.graal; /*- @@ -24,12 +24,12 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConvertException.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java similarity index 96% rename from vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConvertException.java rename to rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java index 33646fbf4..e02365c20 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConvertException.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.graal; +package org.semanticweb.rulewerk.graal; /*- * #%L diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java similarity index 96% rename from vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java rename to rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java index 5b93b173a..3a03c5993 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.graal; +package org.semanticweb.rulewerk.graal; /*- * #%L @@ -27,13 +27,13 @@ import java.util.Set; import org.apache.commons.lang3.StringUtils; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; 
-import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; import fr.lirmm.graphik.graal.api.core.AtomSet; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; diff --git a/vlog4j-graal/src/test/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverterTest.java b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java similarity index 96% rename from vlog4j-graal/src/test/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverterTest.java rename to rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java index 3642c100e..3e81a6909 100644 --- a/vlog4j-graal/src/test/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverterTest.java +++ b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.graal; +package org.semanticweb.rulewerk.graal; /*- * #%L @@ -27,13 +27,13 @@ import java.util.Collections; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; import fr.lirmm.graphik.graal.api.io.ParseException; diff --git a/vlog4j-owlapi/LICENSE.txt b/rulewerk-owlapi/LICENSE.txt similarity index 100% rename from vlog4j-owlapi/LICENSE.txt rename to rulewerk-owlapi/LICENSE.txt diff --git a/vlog4j-owlapi/pom.xml b/rulewerk-owlapi/pom.xml similarity index 79% rename from vlog4j-owlapi/pom.xml rename to rulewerk-owlapi/pom.xml index 3433c9bee..6e9fab4c3 100644 --- a/vlog4j-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -1,36 +1,36 @@ - - - 4.0.0 - - - org.semanticweb.vlog4j - vlog4j-parent - 0.6.0-SNAPSHOT - - - vlog4j-owlapi - jar - - VLog4j OWL API Support - Bindings and utilities for working with OWL ontologies using the OWL API - - - - net.sourceforge.owlapi - owlapi-apibinding - ${owlapi.version} - - - net.sourceforge.owlapi - owlapi-api - ${owlapi.version} - - - ${project.groupId} - vlog4j-core - ${project.version} - - - - + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.6.0-SNAPSHOT + + + rulewerk-owlapi + jar + + Rulewerk OWL API Support + Bindings and utilities for working with OWL ontologies using the OWL API + + + + net.sourceforge.owlapi + owlapi-apibinding + ${owlapi.version} + + + net.sourceforge.owlapi + owlapi-api + 
${owlapi.version} + + + ${project.groupId} + rulewerk-core + ${project.version} + + + + diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/AbstractClassToRuleConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java similarity index 96% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/AbstractClassToRuleConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java index 3285795c7..fdee7057b 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/AbstractClassToRuleConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L @@ -29,11 +29,11 @@ import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLClassExpressionVisitor; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; /** * Abstract base class for converters that create rules from OWL class diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java similarity index 95% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java index 027ebb4af..d77a95389 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L @@ -42,10 +42,10 @@ import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; /** * Helper class for transforming OWL class expressions that occur as subclasses diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleHeadConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java similarity index 96% rename from 
vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleHeadConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java index 79dc8b3cf..4a958d114 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleHeadConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L @@ -42,9 +42,9 @@ import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; /** * Helper class for transforming OWL class expressions that occur as diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java similarity index 95% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java index 09e788f6a..f16abe0f0 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; import java.util.ArrayList; import java.util.Arrays; @@ -68,21 +68,21 @@ import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.SWRLRule; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.ExistentialVariableImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.FactImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import 
org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.ExistentialVariableImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.FactImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; /** * Class for converting OWL axioms to rules. diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlFeatureNotSupportedException.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java similarity index 96% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlFeatureNotSupportedException.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java index d3ddbf4cf..254da4dcd 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlFeatureNotSupportedException.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java similarity index 90% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index 02b63aae2..2a6f7ea05 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; import java.io.UnsupportedEncodingException; import java.math.BigInteger; @@ -35,16 +35,16 @@ import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.FactImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.owlapi.AbstractClassToRuleConverter.SimpleConjunction; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import 
org.semanticweb.rulewerk.core.model.implementation.FactImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.owlapi.AbstractClassToRuleConverter.SimpleConjunction; /** * Utility class for helper functions that are used to convert OWL API objects diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java similarity index 92% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java index 2def20dc8..e5386caa4 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L @@ -23,8 +23,8 @@ import java.util.Set; import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Rule; /** * Class for converting OWL ontologies to rules. diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java similarity index 98% rename from vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java rename to rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java index dd101601b..6ed6fee3a 100644 --- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java +++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L @@ -42,13 +42,13 @@ import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class OwlAxiomToRulesConverterTest { diff --git a/vlog4j-parser/LICENSE.txt b/rulewerk-parser/LICENSE.txt similarity index 100% rename from vlog4j-parser/LICENSE.txt rename to rulewerk-parser/LICENSE.txt diff --git a/vlog4j-parser/pom.xml b/rulewerk-parser/pom.xml similarity index 93% rename from 
vlog4j-parser/pom.xml rename to rulewerk-parser/pom.xml index b8a7b07f4..70df4a5b6 100644 --- a/vlog4j-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -6,14 +6,14 @@ 4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT - vlog4j-parser + rulewerk-parser - VLog4j Parser + Rulewerk Parser http://maven.apache.org UTF-8 @@ -21,7 +21,7 @@ ${project.groupId} - vlog4j-core + rulewerk-core ${project.version} @@ -36,7 +36,7 @@ ruleparser - ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/javacc/ + ${basedir}/src/main/java/org/semanticweb/rulewerk/parser/javacc/ javacc diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java similarity index 89% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java index bd5b14c24..a6cd79f27 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -20,8 +20,8 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Handler for parsing a configurable literal expression. diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java similarity index 89% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java index bbc6a359c..bf89afe17 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSource; /** * Handler for parsing a custom Data Source declaration. 
diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java similarity index 91% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java index eec3b4fcd..12ad24f5e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; /** * Handler for parsing a custom Datatype constant. diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java similarity index 74% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java index 625f6f87c..f34bbc8c8 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -20,12 +20,12 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.implementation.Serializer; -import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; -import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; -import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; -import org.semanticweb.vlog4j.parser.directives.ImportFileDirectiveHandler; -import org.semanticweb.vlog4j.parser.directives.ImportFileRelativeDirectiveHandler; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.parser.datasources.CsvFileDataSourceDeclarationHandler; +import org.semanticweb.rulewerk.parser.datasources.RdfFileDataSourceDeclarationHandler; +import org.semanticweb.rulewerk.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; +import org.semanticweb.rulewerk.parser.directives.ImportFileDirectiveHandler; +import org.semanticweb.rulewerk.parser.directives.ImportFileRelativeDirectiveHandler; /** * Default parser configuration. Registers default data sources. 
diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java similarity index 98% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java index 51190723e..4d1b77764 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -24,7 +24,7 @@ import java.util.Optional; import java.util.function.Function; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Term; /** * A tagged union representing the possible types allowed to appear as arguments diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java similarity index 95% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 69d772f70..fca86b07b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -28,11 +28,11 @@ import java.util.List; import java.util.NoSuchElementException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; -import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; +import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Handler for parsing a custom directive. 
diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java similarity index 90% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index e5e3ddba3..4319a79ab 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -20,9 +20,9 @@ * #L% */ -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.implementation.AbstractPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.AbstractPrefixDeclarationRegistry; /** * Implementation of {@link PrefixDeclarationRegistry} that is used when parsing diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java similarity index 90% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 861781378..22b268165 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -25,15 +25,15 @@ import java.util.List; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; -import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; +import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Class to keep parser configuration. 
@@ -146,7 +146,7 @@ public Constant parseDatatypeConstant(final String lexicalForm, final String dat /** * Check if a handler for this - * {@link org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter} + * {@link org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter} * is registered * * @param delimiter delimiter to check. @@ -266,7 +266,7 @@ public KnowledgeBase parseDirectiveStatement(String name, List4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT - vlog4j-rdf + rulewerk-rdf jar - VLog4j RDF Support + Rulewerk RDF Support Bindings and utilities for working with RDF data ${project.groupId} - vlog4j-core + rulewerk-core ${project.version} diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java similarity index 89% rename from vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java rename to rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index 0c58bb826..e7473f587 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.rdf; +package org.semanticweb.rulewerk.rdf; /*- * #%L @@ -31,13 +31,13 @@ import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.model.Value; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; /** * Class for converting RDF {@link Model}s to {@link PositiveLiteral} sets. 
diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java similarity index 81% rename from vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java rename to rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java index 361da7991..59b0ac934 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.rdf; +package org.semanticweb.rulewerk.rdf; /*- * #%L @@ -26,12 +26,12 @@ import org.openrdf.model.Value; import org.openrdf.model.datatypes.XMLDatatypeUtil; import org.openrdf.rio.ntriples.NTriplesUtil; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; final class RdfValueToTermConverter { diff --git a/vlog4j-rdf/src/test/data/input/collections.ttl b/rulewerk-rdf/src/test/data/input/collections.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/collections.ttl rename to rulewerk-rdf/src/test/data/input/collections.ttl diff --git a/vlog4j-rdf/src/test/data/input/escapedCharacters.ttl b/rulewerk-rdf/src/test/data/input/escapedCharacters.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/escapedCharacters.ttl rename to rulewerk-rdf/src/test/data/input/escapedCharacters.ttl diff --git a/vlog4j-rdf/src/test/data/input/exampleFacts.ttl b/rulewerk-rdf/src/test/data/input/exampleFacts.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/exampleFacts.ttl rename to rulewerk-rdf/src/test/data/input/exampleFacts.ttl diff --git a/vlog4j-rdf/src/test/data/input/labelledBNodes.ttl b/rulewerk-rdf/src/test/data/input/labelledBNodes.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/labelledBNodes.ttl rename to rulewerk-rdf/src/test/data/input/labelledBNodes.ttl diff --git a/vlog4j-rdf/src/test/data/input/languageTags.ttl b/rulewerk-rdf/src/test/data/input/languageTags.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/languageTags.ttl rename to rulewerk-rdf/src/test/data/input/languageTags.ttl diff --git a/vlog4j-rdf/src/test/data/input/literalValues.ttl b/rulewerk-rdf/src/test/data/input/literalValues.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/literalValues.ttl rename to rulewerk-rdf/src/test/data/input/literalValues.ttl diff --git a/vlog4j-rdf/src/test/data/input/relativeURIs.ttl b/rulewerk-rdf/src/test/data/input/relativeURIs.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/relativeURIs.ttl rename to 
rulewerk-rdf/src/test/data/input/relativeURIs.ttl diff --git a/vlog4j-rdf/src/test/data/input/unlabelledBNodes.ttl b/rulewerk-rdf/src/test/data/input/unlabelledBNodes.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/unlabelledBNodes.ttl rename to rulewerk-rdf/src/test/data/input/unlabelledBNodes.ttl diff --git a/vlog4j-rdf/src/test/data/input/unnormalizedLiteralValues.ttl b/rulewerk-rdf/src/test/data/input/unnormalizedLiteralValues.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/unnormalizedLiteralValues.ttl rename to rulewerk-rdf/src/test/data/input/unnormalizedLiteralValues.ttl diff --git a/vlog4j-rdf/src/test/data/output/.keep b/rulewerk-rdf/src/test/data/output/.keep similarity index 100% rename from vlog4j-rdf/src/test/data/output/.keep rename to rulewerk-rdf/src/test/data/output/.keep diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/RdfTestUtils.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java similarity index 90% rename from vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/RdfTestUtils.java rename to rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java index 700c3db4d..ca400540f 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/RdfTestUtils.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.rdf; +package org.semanticweb.rulewerk.rdf; /*- * #%L @@ -35,11 +35,11 @@ import org.openrdf.rio.RDFParser; import org.openrdf.rio.Rio; import org.openrdf.rio.helpers.StatementCollector; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public final class RdfTestUtils { diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java similarity index 94% rename from vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java rename to rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java index b5ab85281..c0f126e15 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.rdf; +package org.semanticweb.rulewerk.rdf; /*- * #%L @@ -22,10 +22,10 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import static org.semanticweb.vlog4j.rdf.RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; -import static org.semanticweb.vlog4j.rdf.RdfTestUtils.RDF_FIRST; -import static org.semanticweb.vlog4j.rdf.RdfTestUtils.RDF_NIL; -import static org.semanticweb.vlog4j.rdf.RdfTestUtils.RDF_REST; +import static org.semanticweb.rulewerk.rdf.RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; +import static org.semanticweb.rulewerk.rdf.RdfTestUtils.RDF_FIRST; +import static 
org.semanticweb.rulewerk.rdf.RdfTestUtils.RDF_NIL; +import static org.semanticweb.rulewerk.rdf.RdfTestUtils.RDF_REST; import java.io.File; import java.io.IOException; @@ -40,12 +40,12 @@ import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class TestConvertRdfFileToFacts { diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java similarity index 85% rename from vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java rename to rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java index 65cc79ef9..fc24fcaf5 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.rdf; +package org.semanticweb.rulewerk.rdf; /*- * #%L @@ -35,16 +35,16 @@ import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; public class TestReasonOverRdfFacts { From b2d825a52c72f2504927d12718c11a7c33c652ca Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 14:51:10 +0100 Subject: [PATCH 0567/1003] Update license headers --- .../rulewerk/client/picocli/ClientUtils.java | 8 +- .../client/picocli/PrintQueryResults.java | 8 +- .../rulewerk/client/picocli/SaveModel.java | 8 +- 
.../client/picocli/SaveQueryResults.java | 8 +- .../rulewerk/client/picocli/VLog4jClient.java | 8 +- .../picocli/VLog4jClientMaterialize.java | 8 +- .../client/picocli/PrintQueryResultsTest.java | 6 +- .../client/picocli/SaveModelTest.java | 8 +- .../client/picocli/SaveQueryResultsTest.java | 8 +- .../IncompatiblePredicateArityException.java | 4 +- .../PrefixDeclarationException.java | 8 +- .../exceptions/ReasonerStateException.java | 4 +- .../core/exceptions/VLog4jException.java | 8 +- .../exceptions/VLog4jRuntimeException.java | 4 +- .../core/model/api/AbstractConstant.java | 8 +- .../rulewerk/core/model/api/Conjunction.java | 4 +- .../rulewerk/core/model/api/Constant.java | 4 +- .../rulewerk/core/model/api/DataSource.java | 8 +- .../core/model/api/DataSourceDeclaration.java | 4 +- .../core/model/api/DatatypeConstant.java | 8 +- .../rulewerk/core/model/api/Entity.java | 4 +- .../core/model/api/ExistentialVariable.java | 8 +- .../rulewerk/core/model/api/Fact.java | 4 +- .../model/api/LanguageStringConstant.java | 8 +- .../rulewerk/core/model/api/Literal.java | 4 +- .../rulewerk/core/model/api/NamedNull.java | 8 +- .../core/model/api/NegativeLiteral.java | 4 +- .../core/model/api/PositiveLiteral.java | 4 +- .../rulewerk/core/model/api/Predicate.java | 8 +- .../model/api/PrefixDeclarationRegistry.java | 8 +- .../rulewerk/core/model/api/QueryResult.java | 86 +-- .../rulewerk/core/model/api/Rule.java | 4 +- .../rulewerk/core/model/api/Statement.java | 4 +- .../core/model/api/StatementVisitor.java | 4 +- .../rulewerk/core/model/api/SyntaxObject.java | 4 +- .../rulewerk/core/model/api/Term.java | 8 +- .../rulewerk/core/model/api/TermType.java | 4 +- .../rulewerk/core/model/api/TermVisitor.java | 4 +- .../rulewerk/core/model/api/Terms.java | 4 +- .../core/model/api/UniversalVariable.java | 8 +- .../rulewerk/core/model/api/Variable.java | 8 +- .../implementation/AbstractConstantImpl.java | 8 +- .../implementation/AbstractLiteralImpl.java | 4 +- .../AbstractPrefixDeclarationRegistry.java | 8 +- .../implementation/AbstractTermImpl.java | 8 +- .../model/implementation/ConjunctionImpl.java | 4 +- .../DataSourceDeclarationImpl.java | 4 +- .../implementation/DatatypeConstantImpl.java | 4 +- .../ExistentialVariableImpl.java | 4 +- .../model/implementation/Expressions.java | 4 +- .../core/model/implementation/FactImpl.java | 4 +- .../LanguageStringConstantImpl.java | 4 +- .../MergingPrefixDeclarationRegistry.java | 8 +- .../model/implementation/NamedNullImpl.java | 8 +- .../implementation/NegativeLiteralImpl.java | 4 +- .../implementation/PositiveLiteralImpl.java | 4 +- .../model/implementation/PredicateImpl.java | 186 +++---- .../implementation/RenamedNamedNull.java | 8 +- .../core/model/implementation/RuleImpl.java | 4 +- .../core/model/implementation/Serializer.java | 8 +- .../implementation/UniversalVariableImpl.java | 8 +- .../core/reasoner/AcyclicityNotion.java | 4 +- .../rulewerk/core/reasoner/Algorithm.java | 22 +- .../rulewerk/core/reasoner/Correctness.java | 4 +- .../core/reasoner/CyclicityResult.java | 4 +- .../rulewerk/core/reasoner/KnowledgeBase.java | 8 +- .../core/reasoner/KnowledgeBaseListener.java | 4 +- .../rulewerk/core/reasoner/LogLevel.java | 4 +- .../core/reasoner/QueryAnswerCount.java | 4 +- .../core/reasoner/QueryResultIterator.java | 4 +- .../rulewerk/core/reasoner/Reasoner.java | 8 +- .../rulewerk/core/reasoner/ReasonerState.java | 8 +- .../core/reasoner/RuleRewriteStrategy.java | 64 +-- .../implementation/CsvFileDataSource.java | 8 +- .../EmptyQueryResultIterator.java | 4 +- 
.../implementation/FileDataSource.java | 8 +- .../implementation/InMemoryDataSource.java | 8 +- .../implementation/ModelToVLogConverter.java | 4 +- .../implementation/QueryAnswerCountImpl.java | 4 +- .../implementation/QueryResultImpl.java | 4 +- .../implementation/RdfFileDataSource.java | 8 +- .../implementation/Skolemization.java | 8 +- .../SparqlQueryResultDataSource.java | 8 +- .../implementation/TermToVLogConverter.java | 266 ++++----- .../implementation/VLogDataSource.java | 4 +- .../implementation/VLogKnowledgeBase.java | 8 +- .../VLogQueryResultIterator.java | 4 +- .../reasoner/implementation/VLogReasoner.java | 8 +- .../implementation/VLogToModelConverter.java | 4 +- .../core/model/ConjunctionImplTest.java | 4 +- .../core/model/DataSourceDeclarationTest.java | 8 +- .../rulewerk/core/model/FactTest.java | 4 +- .../MergingPrefixDeclarationRegistryTest.java | 8 +- .../core/model/NegativeLiteralImplTest.java | 4 +- .../core/model/PositiveLiteralImplTest.java | 4 +- .../core/model/PredicateImplTest.java | 8 +- .../rulewerk/core/model/RuleImplTest.java | 4 +- .../rulewerk/core/model/TermImplTest.java | 8 +- .../core/reasoner/KnowledgeBaseTest.java | 8 +- .../rulewerk/core/reasoner/LoggingTest.java | 4 +- .../core/reasoner/ReasonerTimeoutTest.java | 4 +- .../implementation/AddDataSourceTest.java | 4 +- .../implementation/AnswerQueryTest.java | 4 +- .../implementation/CsvFileDataSourceTest.java | 8 +- .../FileDataSourceTestUtils.java | 8 +- .../GeneratedAnonymousIndividualsTest.java | 4 +- .../ModelToVLogConverterTest.java | 504 +++++++++--------- .../implementation/QueryAnswerCountTest.java | 4 +- .../QueryAnsweringCorrectnessTest.java | 4 +- .../implementation/QueryResultImplTest.java | 120 ++--- .../implementation/QueryResultsUtils.java | 8 +- .../implementation/RdfFileDataSourceTest.java | 8 +- .../implementation/SkolemizationTest.java | 8 +- .../SparqlQueryResultDataSourceTest.java | 4 +- .../implementation/VLogReasonerBasics.java | 216 ++++---- .../VLogReasonerCombinedInputs.java | 4 +- .../implementation/VLogReasonerCsvInput.java | 8 +- .../implementation/VLogReasonerCsvOutput.java | 4 +- .../implementation/VLogReasonerNegation.java | 4 +- .../implementation/VLogReasonerRdfInput.java | 8 +- .../VLogReasonerSparqlInput.java | 4 +- .../implementation/VLogReasonerStateTest.java | 4 +- .../VLogReasonerWriteInferencesTest.java | 8 +- .../VLogToModelConverterTest.java | 4 +- .../vlog/ExportQueryResultToCsvFileTest.java | 4 +- .../core/reasoner/vlog/LargeAritiesTest.java | 278 +++++----- .../reasoner/vlog/StratifiedNegationTest.java | 4 +- .../vlog/VLogDataFromCsvFileTest.java | 8 +- .../reasoner/vlog/VLogDataFromMemoryTest.java | 496 ++++++++--------- .../vlog/VLogDataFromRdfFileTest.java | 4 +- .../core/reasoner/vlog/VLogExpressions.java | 4 +- .../reasoner/vlog/VLogQueryResultUtils.java | 4 +- .../core/reasoner/vlog/VLogQueryTest.java | 4 +- .../core/reasoner/vlog/VLogTermNamesTest.java | 4 +- .../examples/CompareWikidataDBpedia.java | 8 +- .../rulewerk/examples/CountingTriangles.java | 6 +- .../rulewerk/examples/DoidExample.java | 4 +- .../rulewerk/examples/ExamplesUtils.java | 4 +- .../InMemoryGraphAnalysisExample.java | 4 +- .../examples/SimpleReasoningExample.java | 4 +- .../examples/core/AddDataFromCsvFile.java | 8 +- .../examples/core/AddDataFromRdfFile.java | 8 +- .../core/AddDataFromSparqlQueryResults.java | 414 +++++++------- .../core/ConfigureReasonerLogging.java | 4 +- .../SkolemVsRestrictedChaseTermination.java | 272 +++++----- .../examples/graal/AddDataFromDlgpFile.java | 
4 +- .../examples/graal/AddDataFromGraal.java | 4 +- .../examples/graal/DoidExampleGraal.java | 8 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 282 +++++----- .../examples/rdf/AddDataFromRdfModel.java | 368 ++++++------- .../graal/GraalConjunctiveQueryToRule.java | 4 +- .../rulewerk/graal/GraalConvertException.java | 4 +- .../graal/GraalToVLog4JModelConverter.java | 8 +- .../GraalToVLog4JModelConverterTest.java | 4 +- .../owlapi/AbstractClassToRuleConverter.java | 8 +- .../owlapi/ClassToRuleBodyConverter.java | 8 +- .../owlapi/ClassToRuleHeadConverter.java | 8 +- .../owlapi/OwlAxiomToRulesConverter.java | 8 +- .../OwlFeatureNotSupportedException.java | 4 +- .../owlapi/OwlToRulesConversionHelper.java | 4 +- .../rulewerk/owlapi/OwlToRulesConverter.java | 8 +- .../owlapi/OwlAxiomToRulesConverterTest.java | 4 +- .../parser/ConfigurableLiteralHandler.java | 8 +- .../parser/DataSourceDeclarationHandler.java | 4 +- .../parser/DatatypeConstantHandler.java | 8 +- .../parser/DefaultParserConfiguration.java | 8 +- .../rulewerk/parser/DirectiveArgument.java | 8 +- .../rulewerk/parser/DirectiveHandler.java | 8 +- .../LocalPrefixDeclarationRegistry.java | 8 +- .../rulewerk/parser/ParserConfiguration.java | 8 +- .../rulewerk/parser/ParsingException.java | 8 +- .../rulewerk/parser/RuleParser.java | 8 +- .../CsvFileDataSourceDeclarationHandler.java | 8 +- .../RdfFileDataSourceDeclarationHandler.java | 8 +- ...eryResultDataSourceDeclarationHandler.java | 4 +- .../ImportFileDirectiveHandler.java | 8 +- .../ImportFileRelativeDirectiveHandler.java | 8 +- .../parser/javacc/JavaCCParserBase.java | 8 +- .../parser/javacc/SubParserFactory.java | 8 +- .../parser/DirectiveArgumentTest.java | 8 +- .../rulewerk/parser/DirectiveHandlerTest.java | 8 +- .../rulewerk/parser/EntityTest.java | 8 +- .../parser/ParserConfigurationTest.java | 8 +- .../rulewerk/parser/ParserTestUtils.java | 8 +- .../RuleParserConfigurableLiteralTest.java | 8 +- .../parser/RuleParserDataSourceTest.java | 8 +- .../parser/RuleParserParseFactTest.java | 8 +- .../rulewerk/parser/RuleParserTest.java | 8 +- .../parser/javacc/JavaCCParserBaseTest.java | 8 +- .../rulewerk/rdf/RdfModelConverter.java | 8 +- .../rulewerk/rdf/RdfValueToTermConverter.java | 8 +- .../rulewerk/rdf/RdfTestUtils.java | 8 +- .../rdf/TestConvertRdfFileToFacts.java | 8 +- .../rulewerk/rdf/TestReasonOverRdfFacts.java | 8 +- 194 files changed, 2337 insertions(+), 2337 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java index 4984fba5e..edaf61f89 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java index 935dde8fc..4f44b7f4c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java index 1af92ac6a..354fa52f4 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java index 5e593b00c..8d43da5fd 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java index 8663d80d4..c2db5d180 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java index 52bc1e777..961c3a8ed 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java index 6cf1df352..f1845c573 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java @@ -4,14 +4,14 @@ * #%L * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 Rulewerk Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java index 7a4b65532..09a780f0c 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java @@ -13,16 +13,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java index 2ef16cb40..608b10438 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java @@ -12,16 +12,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java index a274e91cf..67f033a81 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java index afd7ec5bd..b81bfaffe 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java index bd28395dc..813035df3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java @@ -1,8 +1,8 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java index 7d848760a..d8c046a19 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java index d0adc72e8..65a52d9c6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java index 9ce72ce46..750434b73 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java index 8251cc869..be4e9f9ee 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java index 0a9c431cd..bf04acd88 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java @@ -2,9 +2,9 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java index 83390c1c7..402d80127 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java index 398e7811a..da1837bba 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java index fb75afea7..28fbc87d5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java index 541f0c598..431b90299 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java index 1ca388565..4e7d60d78 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java index ff082b2eb..61a302e32 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java index e10e49fbb..fbd60d57b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java index 663ecf4a8..b278f7722 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java index 921efca58..4b1350265 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java index 90caac997..df5c6e6b0 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java index 66fa04ad2..6c4598a77 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java index 0a0dc5808..e34ec9b24 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index eb1549f4f..1d24f1daa 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-syntax + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java index f3d1cc6d7..49c6319b7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java @@ -1,43 +1,43 @@ -package org.semanticweb.rulewerk.core.model.api; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.List; - -/** - * A Query Result represents a list of terms that match the terms of the asked - * query. The terms can be named individuals (constants) and anonymous - * individuals (blanks). 
- * - * @author Irina Dragoste - * - */ -public interface QueryResult { - - /** - * Getter for the terms that represent a query answer. - * - * @return the terms that represent a query answer. They can be named - * individuals (constants) and anonymous individuals (blanks). - */ - List getTerms(); - -} +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +/** + * A Query Result represents a list of terms that match the terms of the asked + * query. The terms can be named individuals (constants) and anonymous + * individuals (blanks). + * + * @author Irina Dragoste + * + */ +public interface QueryResult { + + /** + * Getter for the terms that represent a query answer. + * + * @return the terms that represent a query answer. They can be named + * individuals (constants) and anonymous individuals (blanks). + */ + List getTerms(); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java index 8f7b4ee33..81a5000b5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java index 69c6f83c8..fc2b4b009 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java index ced6c05dc..5e8f308f6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java index 73dcafc12..31de18cf6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java index 0c631d653..c8aabae47 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java index 9453cb25c..490604400 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java @@ -2,9 +2,9 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java index 1dad479be..36d662321 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java index 2baf7355e..a611d0bc8 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java index 12bc6ab19..e432c3c20 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java index 69210b5d0..ba0785752 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java index 699a96d41..426c993c6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java @@ -5,16 +5,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java index ba4290138..e19864aee 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 2f58af65b..3e9127f74 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-syntax + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java index c8040af01..e6c037d43 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java @@ -5,16 +5,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java index 8f24855d3..24a750694 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java index 1fcb6bd68..db80f1549 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java index 6f42312c0..f81e44681 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java index 953d92c31..2f7f41e5f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java index fcb3e01eb..24998c007 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java index 60ee41579..fad53f4bb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java index 9a112ba09..235bfcd31 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index a63f73950..77aa2038b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-syntax + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java index ff41632ae..803629460 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java index 740e8af97..554ae0f63 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java index 83eb4e4e0..c0aba7096 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index 25e9d3c21..6ec346dae 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -1,93 +1,93 @@ -package org.semanticweb.rulewerk.core.model.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.rulewerk.core.model.api.Predicate; - -/** - * Implementation for {@link Predicate}. Supports predicates of arity 1 or - * higher. - * - * @author Irina Dragoste - * - */ -public class PredicateImpl implements Predicate { - - final private String name; - - final private int arity; - - /** - * Constructor for {@link Predicate}s of arity 1 or higher. - * - * @param name a non-blank String (not null, nor empty or whitespace). - * @param arity an int value strictly greater than 0. 
- */ - public PredicateImpl(@NonNull String name, int arity) { - Validate.notBlank(name, "Predicates cannot be named by blank Strings."); - Validate.isTrue(arity > 0, "Predicate arity must be greater than zero: %d", arity); - - this.name = name; - this.arity = arity; - } - - @Override - public String getName() { - return this.name; - } - - @Override - public int getArity() { - return this.arity; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = this.arity; - result = prime * result + this.name.hashCode(); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (!(obj instanceof Predicate)) { - return false; - } - final Predicate other = (Predicate) obj; - - return this.arity == other.getArity() && this.name.equals(other.getName()); - } - - @Override - public String toString() { - return getSyntacticRepresentation(); - } - -} +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.apache.commons.lang3.Validate; +import org.eclipse.jdt.annotation.NonNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; + +/** + * Implementation for {@link Predicate}. Supports predicates of arity 1 or + * higher. + * + * @author Irina Dragoste + * + */ +public class PredicateImpl implements Predicate { + + final private String name; + + final private int arity; + + /** + * Constructor for {@link Predicate}s of arity 1 or higher. + * + * @param name a non-blank String (not null, nor empty or whitespace). + * @param arity an int value strictly greater than 0. 
+ */ + public PredicateImpl(@NonNull String name, int arity) { + Validate.notBlank(name, "Predicates cannot be named by blank Strings."); + Validate.isTrue(arity > 0, "Predicate arity must be greater than zero: %d", arity); + + this.name = name; + this.arity = arity; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public int getArity() { + return this.arity; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = this.arity; + result = prime * result + this.name.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof Predicate)) { + return false; + } + final Predicate other = (Predicate) obj; + + return this.arity == other.getArity() && this.name.equals(other.getName()); + } + + @Override + public String toString() { + return getSyntacticRepresentation(); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java index d54bf8512..140ac7532 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java @@ -4,16 +4,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java index acd038dc0..aae5c7233 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java @@ -5,9 +5,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 6ee191e48..9df2cde79 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -9,16 +9,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java index 1211841b4..ce6b40aa6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java @@ -5,16 +5,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java index 36676c4ae..64c828b51 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java index ea3994d5a..d45b9359d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java @@ -1,10 +1,10 @@ -package org.semanticweb.rulewerk.core.reasoner; - -/* +package org.semanticweb.rulewerk.core.reasoner; + +/* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,9 +17,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * #L% - */ - -public enum Algorithm { - SKOLEM_CHASE, RESTRICTED_CHASE -} + * #L% + */ + +public enum Algorithm { + SKOLEM_CHASE, RESTRICTED_CHASE +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java index 1c6d077bd..264616546 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java index 2e289278b..c5898ef05 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 90cc3bb74..5ebe5560c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java index 127504d46..c46fc60cb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java index fa1a54d45..875612e2f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java index 56cf95bcf..c34419579 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java index 99d08f05e..981d1f5b8 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 04138ef5a..ad4825fb1 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -18,16 +18,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java index bf22ef019..365aec7f7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java index 3aecb060a..8df1012a3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java @@ -1,32 +1,32 @@ -package org.semanticweb.rulewerk.core.reasoner; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -public enum RuleRewriteStrategy { - /** - * Rules are not re-written - */ - NONE, - /** - * Rule heads are split into head pieces whenever possible - */ - SPLIT_HEAD_PIECES -} +package org.semanticweb.rulewerk.core.reasoner; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +public enum RuleRewriteStrategy { + /** + * Rules are not re-written + */ + NONE, + /** + * Rule heads are split into head pieces whenever possible + */ + SPLIT_HEAD_PIECES +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index fee712e49..83aff537a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java index 9c48bbb10..449a9dbe9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
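RuleRewriteStrategy, re-added above with the updated header, selects whether rule heads are split into head pieces before reasoning (SPLIT_HEAD_PIECES) or left untouched (NONE). The configuration sketch below is hedged: it assumes that the Reasoner interface still exposes a setRuleRewriteStrategy setter and that VLogReasoner and KnowledgeBase can be constructed as in earlier releases; none of those calls appear in this patch.

```
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy;
import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner;

public class RuleRewriteSketch {
	public static void main(String[] args) throws Exception {
		final KnowledgeBase kb = new KnowledgeBase();
		// Facts and rules would be added to kb here.
		try (Reasoner reasoner = new VLogReasoner(kb)) {
			// Assumption: setRuleRewriteStrategy(RuleRewriteStrategy) is available on Reasoner
			// and must be called before materialisation starts.
			reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES);
			reasoner.reason();
		}
	}
}
```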
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index b0761e238..4d79ae3a2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java index 13a7066a8..c3033c8a6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java index 1f2f943ee..c146179c9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java index 27814ab4e..edd6b44ca 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java @@ -5,9 +5,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index 6727fd558..b143f7b4f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -2,9 +2,9 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index 43e1c44b1..c1274aac2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index ddde0498a..aaa9d1f2f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index b105f8d82..2ee6c900b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index b83cc7a12..345bd8aaf 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -1,133 +1,133 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.api.TermType; -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; -import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; -import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; -import org.semanticweb.rulewerk.core.model.api.TermVisitor; -import org.semanticweb.rulewerk.core.model.api.UniversalVariable; - -/** - * A visitor that converts {@link Term}s of different types to corresponding - * internal VLog model {@link karmaresearch.vlog.Term}s. 
- * - * @author Irina Dragoste - * - */ -class TermToVLogConverter implements TermVisitor { - - /** - * Transforms an abstract constant to a {@link karmaresearch.vlog.Term} with the - * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. - */ - @Override - public karmaresearch.vlog.Term visit(AbstractConstant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, getVLogNameForConstant(term)); - } - - /** - * Transforms a datatype constant to a {@link karmaresearch.vlog.Term} with the - * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. - */ - @Override - public karmaresearch.vlog.Term visit(DatatypeConstant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); - } - - /** - * Transforms a language-tagged string constant to a - * {@link karmaresearch.vlog.Term} with the same name and type - * {@link karmaresearch.vlog.Term.TermType#CONSTANT}. - */ - @Override - public karmaresearch.vlog.Term visit(LanguageStringConstant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); - } - - /** - * Converts the given constant to the name of a constant in VLog. - * - * @param constant - * @return VLog constant string - */ - public static String getVLogNameForConstant(Constant constant) { - if (constant.getType() == TermType.ABSTRACT_CONSTANT) { - String vLog4jConstantName = constant.getName(); - if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > - return "<" + vLog4jConstantName + ">"; - } else { // keep relative IRIs unchanged - return vLog4jConstantName; - } - } else { // datatype literal - return constant.getName(); - } - } - - /** - * Converts the string representation of a constant in VLog4j directly to the - * name of a constant in VLog, without parsing it into a {@link Constant} first. - * - * @param vLog4jConstantName - * @return VLog constant string - */ - public static String getVLogNameForConstantName(String vLog4jConstantName) { - if (vLog4jConstantName.startsWith("\"")) { // keep datatype literal strings unchanged - return vLog4jConstantName; - } else if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > - return "<" + vLog4jConstantName + ">"; - } else { // keep relative IRIs unchanged - return vLog4jConstantName; - } - } - - /** - * Transforms a universal variable to a {@link karmaresearch.vlog.Term} with the - * same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. - */ - @Override - public karmaresearch.vlog.Term visit(UniversalVariable term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, term.getName()); - } - - /** - * Transforms an existential variable to a {@link karmaresearch.vlog.Term} with - * the same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. - */ - @Override - public karmaresearch.vlog.Term visit(ExistentialVariable term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "!" + term.getName()); - } - - /** - * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same name - * and type {@link karmaresearch.vlog.Term.TermType#BLANK}. 
- */ - @Override - public karmaresearch.vlog.Term visit(NamedNull term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, term.getName()); - } - -} +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; + +/** + * A visitor that converts {@link Term}s of different types to corresponding + * internal VLog model {@link karmaresearch.vlog.Term}s. + * + * @author Irina Dragoste + * + */ +class TermToVLogConverter implements TermVisitor { + + /** + * Transforms an abstract constant to a {@link karmaresearch.vlog.Term} with the + * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. + */ + @Override + public karmaresearch.vlog.Term visit(AbstractConstant term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, getVLogNameForConstant(term)); + } + + /** + * Transforms a datatype constant to a {@link karmaresearch.vlog.Term} with the + * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. + */ + @Override + public karmaresearch.vlog.Term visit(DatatypeConstant term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + } + + /** + * Transforms a language-tagged string constant to a + * {@link karmaresearch.vlog.Term} with the same name and type + * {@link karmaresearch.vlog.Term.TermType#CONSTANT}. + */ + @Override + public karmaresearch.vlog.Term visit(LanguageStringConstant term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + } + + /** + * Converts the given constant to the name of a constant in VLog. + * + * @param constant + * @return VLog constant string + */ + public static String getVLogNameForConstant(Constant constant) { + if (constant.getType() == TermType.ABSTRACT_CONSTANT) { + String vLog4jConstantName = constant.getName(); + if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > + return "<" + vLog4jConstantName + ">"; + } else { // keep relative IRIs unchanged + return vLog4jConstantName; + } + } else { // datatype literal + return constant.getName(); + } + } + + /** + * Converts the string representation of a constant in VLog4j directly to the + * name of a constant in VLog, without parsing it into a {@link Constant} first. 
+ * + * @param vLog4jConstantName + * @return VLog constant string + */ + public static String getVLogNameForConstantName(String vLog4jConstantName) { + if (vLog4jConstantName.startsWith("\"")) { // keep datatype literal strings unchanged + return vLog4jConstantName; + } else if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > + return "<" + vLog4jConstantName + ">"; + } else { // keep relative IRIs unchanged + return vLog4jConstantName; + } + } + + /** + * Transforms a universal variable to a {@link karmaresearch.vlog.Term} with the + * same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. + */ + @Override + public karmaresearch.vlog.Term visit(UniversalVariable term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, term.getName()); + } + + /** + * Transforms an existential variable to a {@link karmaresearch.vlog.Term} with + * the same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. + */ + @Override + public karmaresearch.vlog.Term visit(ExistentialVariable term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "!" + term.getName()); + } + + /** + * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same name + * and type {@link karmaresearch.vlog.Term.TermType#BLANK}. + */ + @Override + public karmaresearch.vlog.Term visit(NamedNull term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, term.getName()); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java index d03b1b118..68eb58133 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java index c66b3094d..a7e7da9aa 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2020 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
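The getVLogNameForConstantName helper in the rewritten TermToVLogConverter above encodes the constant-name rules passed to VLog: quoted datatype literal strings are kept as-is, names containing a colon are treated as absolute IRIs and wrapped in angle brackets, and anything else is kept as a relative IRI. Since TermToVLogConverter is package-private, the sketch below merely restates those rules in a standalone class; the class name and the sample constants are illustrative assumptions.

```
public class ConstantNameRulesSketch {

	// Mirrors the logic of TermToVLogConverter.getVLogNameForConstantName from the patch above.
	static String toVLogName(String constantName) {
		if (constantName.startsWith("\"")) {
			return constantName; // datatype literal strings stay unchanged
		} else if (constantName.contains(":")) {
			return "<" + constantName + ">"; // absolute IRIs are enclosed in < >
		} else {
			return constantName; // relative IRIs stay unchanged
		}
	}

	public static void main(String[] args) {
		System.out.println(toVLogName("https://example.org/hasPart")); // <https://example.org/hasPart>
		System.out.println(toVLogName("hasPart")); // hasPart
		System.out.println(toVLogName("\"42\"^^<http://www.w3.org/2001/XMLSchema#integer>")); // unchanged
	}
}
```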
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java index 835fe699a..1db2cc922 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java @@ -2,9 +2,9 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java index a6b48b5bf..66f88d4bb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java @@ -48,16 +48,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java index 3864b4fb7..d90331ccc 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java @@ -2,9 +2,9 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java index 2e6dba525..322bbda3f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 880172977..32ef82333 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java index 70763c3a4..7539d60c9 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index c6202e864..d6889aae4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java index c97f71504..34a7e9fc9 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java index 265f096c9..d4a93e489 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java index 7c398b37b..d8274db91 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java index 47c759252..5fc0ee6e6 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java index 77f47b7b6..9aa9f7129 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index 1305e8acc..b44f89c24 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java index 6b51bfe7a..269cb56cc 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java @@ -11,9 +11,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java index 8e64d2915..b8e28c53d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java @@ -5,9 +5,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java index 5e073a77e..87d84362f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java @@ -5,9 +5,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java index be9efb6c2..9de6276f3 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java @@ -7,9 +7,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java index 22cc04bbb..f9b840f1e 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java index 945c4482a..74a5215fe 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -7,16 +7,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 1460a5a7f..92512fdd6 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java index a8b773a8d..8ff7491e2 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -1,252 +1,252 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; - -public class ModelToVLogConverterTest { - - @Test - public void testToVLogTermVariable() { - final Variable variable = Expressions.makeUniversalVariable("var"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.VARIABLE, "var"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(variable); - - assertEquals(expectedVLogTerm, vLogTerm); - } - - @Test - public void testToVLogTermAbstractConstant() { - final Constant constant = Expressions.makeAbstractConstant("const"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.CONSTANT, "const"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); - - assertEquals(expectedVLogTerm, vLogTerm); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); - } - - @Test - public void testToVLogTermAbstractConstantIri() { - final Constant constant = Expressions.makeAbstractConstant("http://example.org"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.CONSTANT, ""); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); - - assertEquals(expectedVLogTerm, vLogTerm); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); - } - - @Test - public void testToVLogTermDatatypeConstant() { - final Constant constant = Expressions.makeDatatypeConstant("c", "http://example.org"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.CONSTANT, "\"c\"^^"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); - - assertEquals(expectedVLogTerm, vLogTerm); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); - } - - @Test - public void testToVLogTermLanguageStringConstant() { - final Constant constant = Expressions.makeLanguageStringConstant("c", "en"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - 
karmaresearch.vlog.Term.TermType.CONSTANT, "\"c\"@en"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); - - assertEquals(expectedVLogTerm, vLogTerm); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); - } - - @Test - public void testToVLogTermBlank() { - final NamedNull blank = new NamedNullImpl("blank"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.BLANK, "blank"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(blank); - - assertEquals(expectedVLogTerm, vLogTerm); - } - - @Test - public void testToVLogTermArray() { - final Variable vx = Expressions.makeUniversalVariable("x"); - final Variable vxToo = Expressions.makeUniversalVariable("x"); - final Variable vy = Expressions.makeUniversalVariable("y"); - final Constant cx = Expressions.makeAbstractConstant("x"); - final NamedNull bx = new NamedNullImpl("x"); - final List terms = Arrays.asList(vx, cx, vxToo, bx, vy); - - final karmaresearch.vlog.Term expectedVx = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.VARIABLE, "x"); - final karmaresearch.vlog.Term expectedVy = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.VARIABLE, "y"); - final karmaresearch.vlog.Term expectedCx = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.CONSTANT, "x"); - final karmaresearch.vlog.Term expectedBx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, - "x"); - final karmaresearch.vlog.Term[] expectedTermArray = { expectedVx, expectedCx, expectedVx, expectedBx, - expectedVy }; - - final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms); - assertArrayEquals(expectedTermArray, vLogTermArray); - } - - @Test - public void testToVLogTermArrayEmpty() { - final List terms = new ArrayList<>(); - final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms); - - assertNotNull(vLogTermArray); - assertTrue(vLogTermArray.length == 0); - } - - @Test - public void testToVLogFactTuples() { - final Constant c1 = Expressions.makeAbstractConstant("1"); - final Constant c2 = Expressions.makeAbstractConstant("2"); - final Constant c3 = Expressions.makeAbstractConstant("3"); - final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(c1)); - final Fact atom2 = Expressions.makeFact("p2", Arrays.asList(c2, c3)); - - final String[][] vLogTuples = ModelToVLogConverter.toVLogFactTuples(Arrays.asList(atom1, atom2)); - - final String[][] expectedTuples = { { "1" }, { "2", "3" } }; - assertArrayEquals(expectedTuples, vLogTuples); - } - - @Test - public void testToVLogPredicate() { - final Predicate predicate = Expressions.makePredicate("pred", 1); - final String vLogPredicate = ModelToVLogConverter.toVLogPredicate(predicate); - assertEquals("pred-1", vLogPredicate); - } - - @Test - public void testToVLogAtom() { - final Constant c = Expressions.makeAbstractConstant("c"); - final Variable x = Expressions.makeUniversalVariable("x"); - final NamedNull b = new NamedNullImpl("_:b"); - final PositiveLiteral atom = Expressions.makePositiveLiteral("pred", c, x, b); - - final karmaresearch.vlog.Term expectedC = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, - "c"); - final karmaresearch.vlog.Term expectedX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "x"); - final karmaresearch.vlog.Term expectedB = new 
karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, - "_:b"); - - final String expectedPredicateName = "pred" + ModelToVLogConverter.PREDICATE_ARITY_SUFFIX_SEPARATOR + 3; - final karmaresearch.vlog.Term[] expectedTerms = { expectedC, expectedX, expectedB }; - final karmaresearch.vlog.Atom expectedAtom = new karmaresearch.vlog.Atom(expectedPredicateName, expectedTerms); - - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(atom); - assertEquals(expectedAtom, vLogAtom); - } - - @Test - public void testToVLogRuleArray() { - final Variable x = Expressions.makeUniversalVariable("x"); - final Variable y = Expressions.makeUniversalVariable("y"); - final Variable z = Expressions.makeUniversalVariable("z"); - final Variable w = Expressions.makeUniversalVariable("w"); - final Variable v = Expressions.makeExistentialVariable("v"); - final PositiveLiteral atomP1X = Expressions.makePositiveLiteral("p1", x); - final PositiveLiteral atomP2XY = Expressions.makePositiveLiteral("p2", x, y); - final PositiveLiteral atomP3YZ = Expressions.makePositiveLiteral("p3", y, z); - final Rule rule1 = Expressions.makeRule(atomP1X, atomP2XY, atomP3YZ); - final PositiveLiteral atomQXYZ = Expressions.makePositiveLiteral("q", x, y, z); - final PositiveLiteral atomQYW = Expressions.makePositiveLiteral("q", y, w); - final PositiveLiteral atomQ1XWZ = Expressions.makePositiveLiteral("q1", x, w, z); - final PositiveLiteral atomQ2XV = Expressions.makePositiveLiteral("q2", x, v); - final Rule rule2 = Expressions.makeRule(atomQ2XV, atomQ1XWZ, atomQYW, atomQXYZ); - - final karmaresearch.vlog.Term expX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "x"); - final karmaresearch.vlog.Term expY = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "y"); - final karmaresearch.vlog.Term expZ = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "z"); - final karmaresearch.vlog.Term expW = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "w"); - final karmaresearch.vlog.Term expV = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "!v"); - final karmaresearch.vlog.Atom expAtomP1X = new karmaresearch.vlog.Atom("p1-1", expX); - final karmaresearch.vlog.Atom expAtomP2XY = new karmaresearch.vlog.Atom("p2-2", expX, expY); - final karmaresearch.vlog.Atom expAtomP3YZ = new karmaresearch.vlog.Atom("p3-2", expY, expZ); - final karmaresearch.vlog.Rule expectedRule1 = new karmaresearch.vlog.Rule( - new karmaresearch.vlog.Atom[] { expAtomP1X }, - new karmaresearch.vlog.Atom[] { expAtomP2XY, expAtomP3YZ }); - final karmaresearch.vlog.Atom expAtomQXYZ = new karmaresearch.vlog.Atom("q-3", expX, expY, expZ); - final karmaresearch.vlog.Atom expAtomQYW = new karmaresearch.vlog.Atom("q-2", expY, expW); - final karmaresearch.vlog.Atom expAtomQ1XWZ = new karmaresearch.vlog.Atom("q1-3", expX, expW, expZ); - final karmaresearch.vlog.Atom expAtomQ2XV = new karmaresearch.vlog.Atom("q2-2", expX, expV); - final karmaresearch.vlog.Rule expectedRule2 = new karmaresearch.vlog.Rule( - new karmaresearch.vlog.Atom[] { expAtomQ2XV }, - new karmaresearch.vlog.Atom[] { expAtomQ1XWZ, expAtomQYW, expAtomQXYZ }); - - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter - .toVLogRuleArray(Arrays.asList(rule1, rule2)); - final karmaresearch.vlog.Rule[] expectedRuleArray = new karmaresearch.vlog.Rule[] { expectedRule1, - expectedRule2 }; - assertArrayEquals(expectedRuleArray, vLogRuleArray); - } - - @Test - public 
void testVLogRuleRewritingStrategy() { - assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.NONE, - ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.NONE)); - assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.AGGRESSIVE, - ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES)); - } - -} +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; + +public class ModelToVLogConverterTest { + + @Test + public void testToVLogTermVariable() { + final Variable variable = Expressions.makeUniversalVariable("var"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.VARIABLE, "var"); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(variable); + + assertEquals(expectedVLogTerm, vLogTerm); + } + + @Test + public void testToVLogTermAbstractConstant() { + final Constant constant = Expressions.makeAbstractConstant("const"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, "const"); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); + + assertEquals(expectedVLogTerm, vLogTerm); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); + } + + @Test + public void testToVLogTermAbstractConstantIri() { + final Constant constant = Expressions.makeAbstractConstant("http://example.org"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, ""); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); + + assertEquals(expectedVLogTerm, vLogTerm); + 
assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); + } + + @Test + public void testToVLogTermDatatypeConstant() { + final Constant constant = Expressions.makeDatatypeConstant("c", "http://example.org"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, "\"c\"^^"); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); + + assertEquals(expectedVLogTerm, vLogTerm); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); + } + + @Test + public void testToVLogTermLanguageStringConstant() { + final Constant constant = Expressions.makeLanguageStringConstant("c", "en"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, "\"c\"@en"); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); + + assertEquals(expectedVLogTerm, vLogTerm); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); + } + + @Test + public void testToVLogTermBlank() { + final NamedNull blank = new NamedNullImpl("blank"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.BLANK, "blank"); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(blank); + + assertEquals(expectedVLogTerm, vLogTerm); + } + + @Test + public void testToVLogTermArray() { + final Variable vx = Expressions.makeUniversalVariable("x"); + final Variable vxToo = Expressions.makeUniversalVariable("x"); + final Variable vy = Expressions.makeUniversalVariable("y"); + final Constant cx = Expressions.makeAbstractConstant("x"); + final NamedNull bx = new NamedNullImpl("x"); + final List terms = Arrays.asList(vx, cx, vxToo, bx, vy); + + final karmaresearch.vlog.Term expectedVx = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.VARIABLE, "x"); + final karmaresearch.vlog.Term expectedVy = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.VARIABLE, "y"); + final karmaresearch.vlog.Term expectedCx = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, "x"); + final karmaresearch.vlog.Term expectedBx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, + "x"); + final karmaresearch.vlog.Term[] expectedTermArray = { expectedVx, expectedCx, expectedVx, expectedBx, + expectedVy }; + + final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms); + assertArrayEquals(expectedTermArray, vLogTermArray); + } + + @Test + public void testToVLogTermArrayEmpty() { + final List terms = new ArrayList<>(); + final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms); + + assertNotNull(vLogTermArray); + assertTrue(vLogTermArray.length == 0); + } + + @Test + public void testToVLogFactTuples() { + final Constant c1 = Expressions.makeAbstractConstant("1"); + final Constant c2 = Expressions.makeAbstractConstant("2"); + final Constant c3 = Expressions.makeAbstractConstant("3"); + final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(c1)); + final Fact atom2 = Expressions.makeFact("p2", 
Arrays.asList(c2, c3)); + + final String[][] vLogTuples = ModelToVLogConverter.toVLogFactTuples(Arrays.asList(atom1, atom2)); + + final String[][] expectedTuples = { { "1" }, { "2", "3" } }; + assertArrayEquals(expectedTuples, vLogTuples); + } + + @Test + public void testToVLogPredicate() { + final Predicate predicate = Expressions.makePredicate("pred", 1); + final String vLogPredicate = ModelToVLogConverter.toVLogPredicate(predicate); + assertEquals("pred-1", vLogPredicate); + } + + @Test + public void testToVLogAtom() { + final Constant c = Expressions.makeAbstractConstant("c"); + final Variable x = Expressions.makeUniversalVariable("x"); + final NamedNull b = new NamedNullImpl("_:b"); + final PositiveLiteral atom = Expressions.makePositiveLiteral("pred", c, x, b); + + final karmaresearch.vlog.Term expectedC = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + "c"); + final karmaresearch.vlog.Term expectedX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "x"); + final karmaresearch.vlog.Term expectedB = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, + "_:b"); + + final String expectedPredicateName = "pred" + ModelToVLogConverter.PREDICATE_ARITY_SUFFIX_SEPARATOR + 3; + final karmaresearch.vlog.Term[] expectedTerms = { expectedC, expectedX, expectedB }; + final karmaresearch.vlog.Atom expectedAtom = new karmaresearch.vlog.Atom(expectedPredicateName, expectedTerms); + + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(atom); + assertEquals(expectedAtom, vLogAtom); + } + + @Test + public void testToVLogRuleArray() { + final Variable x = Expressions.makeUniversalVariable("x"); + final Variable y = Expressions.makeUniversalVariable("y"); + final Variable z = Expressions.makeUniversalVariable("z"); + final Variable w = Expressions.makeUniversalVariable("w"); + final Variable v = Expressions.makeExistentialVariable("v"); + final PositiveLiteral atomP1X = Expressions.makePositiveLiteral("p1", x); + final PositiveLiteral atomP2XY = Expressions.makePositiveLiteral("p2", x, y); + final PositiveLiteral atomP3YZ = Expressions.makePositiveLiteral("p3", y, z); + final Rule rule1 = Expressions.makeRule(atomP1X, atomP2XY, atomP3YZ); + final PositiveLiteral atomQXYZ = Expressions.makePositiveLiteral("q", x, y, z); + final PositiveLiteral atomQYW = Expressions.makePositiveLiteral("q", y, w); + final PositiveLiteral atomQ1XWZ = Expressions.makePositiveLiteral("q1", x, w, z); + final PositiveLiteral atomQ2XV = Expressions.makePositiveLiteral("q2", x, v); + final Rule rule2 = Expressions.makeRule(atomQ2XV, atomQ1XWZ, atomQYW, atomQXYZ); + + final karmaresearch.vlog.Term expX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "x"); + final karmaresearch.vlog.Term expY = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "y"); + final karmaresearch.vlog.Term expZ = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "z"); + final karmaresearch.vlog.Term expW = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "w"); + final karmaresearch.vlog.Term expV = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "!v"); + final karmaresearch.vlog.Atom expAtomP1X = new karmaresearch.vlog.Atom("p1-1", expX); + final karmaresearch.vlog.Atom expAtomP2XY = new karmaresearch.vlog.Atom("p2-2", expX, expY); + final karmaresearch.vlog.Atom expAtomP3YZ = new karmaresearch.vlog.Atom("p3-2", expY, expZ); + final 
karmaresearch.vlog.Rule expectedRule1 = new karmaresearch.vlog.Rule( + new karmaresearch.vlog.Atom[] { expAtomP1X }, + new karmaresearch.vlog.Atom[] { expAtomP2XY, expAtomP3YZ }); + final karmaresearch.vlog.Atom expAtomQXYZ = new karmaresearch.vlog.Atom("q-3", expX, expY, expZ); + final karmaresearch.vlog.Atom expAtomQYW = new karmaresearch.vlog.Atom("q-2", expY, expW); + final karmaresearch.vlog.Atom expAtomQ1XWZ = new karmaresearch.vlog.Atom("q1-3", expX, expW, expZ); + final karmaresearch.vlog.Atom expAtomQ2XV = new karmaresearch.vlog.Atom("q2-2", expX, expV); + final karmaresearch.vlog.Rule expectedRule2 = new karmaresearch.vlog.Rule( + new karmaresearch.vlog.Atom[] { expAtomQ2XV }, + new karmaresearch.vlog.Atom[] { expAtomQ1XWZ, expAtomQYW, expAtomQXYZ }); + + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter + .toVLogRuleArray(Arrays.asList(rule1, rule2)); + final karmaresearch.vlog.Rule[] expectedRuleArray = new karmaresearch.vlog.Rule[] { expectedRule1, + expectedRule2 }; + assertArrayEquals(expectedRuleArray, vLogRuleArray); + } + + @Test + public void testVLogRuleRewritingStrategy() { + assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.NONE, + ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.NONE)); + assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.AGGRESSIVE, + ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES)); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java index 9c5a993e8..2e7e0c29f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java index af30f5a3f..ffbada38f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
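The re-added ModelToVLogConverterTest above pins down the naming conventions used when handing Rulewerk objects to VLog: predicate names are suffixed with their arity (e.g. "pred" of arity 1 becomes "pred-1"), universal variables keep their name, and existential variables get a leading "!". A small illustrative sketch of just these string conventions, assuming nothing beyond what the test asserts (the class below is hypothetical and uses plain strings instead of karmaresearch.vlog types):

```java
// Illustrative sketch of the naming conventions asserted in ModelToVLogConverterTest.
public class VLogNamingConventionsSketch {

	// predicates are identified as "name-arity", e.g. "pred" with arity 1 -> "pred-1"
	static String vlogPredicateName(String name, int arity) {
		return name + "-" + arity;
	}

	// existential variables are marked with a leading "!"; universal variables keep their name
	static String vlogVariableName(String name, boolean existential) {
		return existential ? "!" + name : name;
	}

	public static void main(String[] args) {
		System.out.println(vlogPredicateName("pred", 1)); // pred-1
		System.out.println(vlogPredicateName("q", 3));    // q-3
		System.out.println(vlogVariableName("x", false)); // x
		System.out.println(vlogVariableName("v", true));  // !v
	}
}
```

The arity suffix is what lets VLog keep apart predicates that share a name but differ in arity, as in the q-2 and q-3 atoms of testToVLogRuleArray.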
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java index 0335dd845..724a41064 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java @@ -1,60 +1,60 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; - -import java.util.Arrays; -import java.util.List; - -import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.QueryResult; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; - -public class QueryResultImplTest { - - @Test - public void testEquals() { - final Constant c1 = Expressions.makeAbstractConstant("C"); - final Constant c2 = Expressions.makeAbstractConstant("ddd"); - final List constantList = Arrays.asList(c1, c1, c2); - - final QueryResult queryResult1 = new QueryResultImpl(constantList); - final QueryResult queryResult2 = new QueryResultImpl(Arrays.asList(c1, c1, c2)); - final QueryResult queryResult3 = new QueryResultImpl(Arrays.asList(c1, c2, c1)); - - assertEquals(queryResult1, queryResult1); - assertEquals(queryResult2, queryResult1); - assertEquals(queryResult2.hashCode(), queryResult1.hashCode()); - assertNotEquals(queryResult3, queryResult1); - assertNotEquals(queryResult3.hashCode(), queryResult1.hashCode()); - assertNotEquals(new QueryResultImpl(null), queryResult1); - assertEquals(new QueryResultImpl(null), new QueryResultImpl(null)); - assertFalse(queryResult1.equals(null)); - assertFalse(queryResult1.equals(constantList)); - } - -} +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; + +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; + +public class QueryResultImplTest { + + @Test + public void testEquals() { + final Constant c1 = Expressions.makeAbstractConstant("C"); + final Constant c2 = Expressions.makeAbstractConstant("ddd"); + final List constantList = Arrays.asList(c1, c1, c2); + + final QueryResult queryResult1 = new QueryResultImpl(constantList); + final QueryResult queryResult2 = new QueryResultImpl(Arrays.asList(c1, c1, c2)); + final QueryResult queryResult3 = new QueryResultImpl(Arrays.asList(c1, c2, c1)); + + assertEquals(queryResult1, queryResult1); + assertEquals(queryResult2, queryResult1); + assertEquals(queryResult2.hashCode(), queryResult1.hashCode()); + assertNotEquals(queryResult3, queryResult1); + assertNotEquals(queryResult3.hashCode(), queryResult1.hashCode()); + assertNotEquals(new QueryResultImpl(null), queryResult1); + assertEquals(new QueryResultImpl(null), new QueryResultImpl(null)); + assertFalse(queryResult1.equals(null)); + assertFalse(queryResult1.equals(constantList)); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java index 6bfbdf501..81ee7716e 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java index ba4730f83..c5baf8bde 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java @@ -4,16 +4,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java index da644f50d..51dc0fe67 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java index fdc5ba789..9e9806113 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java index d27cb9282..0c5ecf2e2 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java @@ -1,108 +1,108 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; - -public class VLogReasonerBasics { - - final String constantNameC = "c"; - final String constantNameD = "d"; - - final Constant constantC = Expressions.makeAbstractConstant(constantNameC); - final Constant constantD = Expressions.makeAbstractConstant(constantNameD); - final Variable x = Expressions.makeUniversalVariable("x"); - final Fact factAc = Expressions.makeFact("A", Arrays.asList(constantC)); - final Fact factAd = Expressions.makeFact("A", Arrays.asList(constantD)); - final PositiveLiteral atomAx = Expressions.makePositiveLiteral("A", x); - final PositiveLiteral atomBx = Expressions.makePositiveLiteral("B", x); - final PositiveLiteral atomCx = Expressions.makePositiveLiteral("C", x); - final Rule ruleBxAx = Expressions.makeRule(atomBx, atomAx); - final Rule ruleCxBx = Expressions.makeRule(atomCx, atomBx); - - @Test(expected = NullPointerException.class) - public void testSetAlgorithmNull() { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.setAlgorithm(null); - } - } - - @Test(expected = NullPointerException.class) - public void setRuleRewriteStrategy1() { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.setRuleRewriteStrategy(null); - } - } - - @Test - public void testLoadRules() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(ruleBxAx, ruleCxBx, ruleBxAx); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - assertEquals(Arrays.asList(ruleBxAx, ruleCxBx), kb.getRules()); - } - } - - @Test - public void testSimpleInference() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(ruleBxAx, ruleCxBx, factAc, factAd); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); - - final QueryResultIterator cxQueryResultEnumBeforeReasoning = reasoner.answerQuery(atomCx, true); - assertFalse(cxQueryResultEnumBeforeReasoning.hasNext()); - - reasoner.reason(); - - final QueryResultIterator cxQueryResultEnumAfterReasoning = reasoner.answerQuery(atomCx, true); - final Set> actualResults = QueryResultsUtils - .collectQueryResults(cxQueryResultEnumAfterReasoning); - - final Set> expectedResults = new HashSet<>( - Arrays.asList(Arrays.asList(constantC), Arrays.asList(constantD))); - - assertEquals(expectedResults, actualResults); - } - } - -} +package org.semanticweb.rulewerk.core.reasoner.implementation; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may 
not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; + +public class VLogReasonerBasics { + + final String constantNameC = "c"; + final String constantNameD = "d"; + + final Constant constantC = Expressions.makeAbstractConstant(constantNameC); + final Constant constantD = Expressions.makeAbstractConstant(constantNameD); + final Variable x = Expressions.makeUniversalVariable("x"); + final Fact factAc = Expressions.makeFact("A", Arrays.asList(constantC)); + final Fact factAd = Expressions.makeFact("A", Arrays.asList(constantD)); + final PositiveLiteral atomAx = Expressions.makePositiveLiteral("A", x); + final PositiveLiteral atomBx = Expressions.makePositiveLiteral("B", x); + final PositiveLiteral atomCx = Expressions.makePositiveLiteral("C", x); + final Rule ruleBxAx = Expressions.makeRule(atomBx, atomAx); + final Rule ruleCxBx = Expressions.makeRule(atomCx, atomBx); + + @Test(expected = NullPointerException.class) + public void testSetAlgorithmNull() { + try (final Reasoner reasoner = Reasoner.getInstance();) { + reasoner.setAlgorithm(null); + } + } + + @Test(expected = NullPointerException.class) + public void setRuleRewriteStrategy1() { + try (final Reasoner reasoner = Reasoner.getInstance();) { + reasoner.setRuleRewriteStrategy(null); + } + } + + @Test + public void testLoadRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleBxAx, ruleCxBx, ruleBxAx); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + assertEquals(Arrays.asList(ruleBxAx, ruleCxBx), kb.getRules()); + } + } + + @Test + public void testSimpleInference() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleBxAx, ruleCxBx, factAc, factAd); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + + final QueryResultIterator cxQueryResultEnumBeforeReasoning = reasoner.answerQuery(atomCx, true); + assertFalse(cxQueryResultEnumBeforeReasoning.hasNext()); + + reasoner.reason(); + + final QueryResultIterator cxQueryResultEnumAfterReasoning = reasoner.answerQuery(atomCx, true); + final Set> actualResults = QueryResultsUtils + .collectQueryResults(cxQueryResultEnumAfterReasoning); + + final Set> expectedResults = new HashSet<>( + Arrays.asList(Arrays.asList(constantC), Arrays.asList(constantD))); + + assertEquals(expectedResults, 
actualResults); + } + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java index dae941489..6c305e40d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -6,9 +6,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java index a7d1f066a..bbdc37316 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java index 3c4e023f3..7585f47bf 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
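
The VLogReasonerBasics test above exercises the complete high-level reasoning workflow of the renamed Rulewerk API: build a KnowledgeBase, attach a VLogReasoner, load, reason, then answer queries. Below is a minimal, self-contained sketch of that same workflow, not part of the patch itself; the class name MinimalReasoningSketch is chosen purely for illustration, and the import of VLogReasoner assumes the org.semanticweb.rulewerk.core.reasoner.implementation package suggested by the test paths in this patch.

import java.io.IOException;
import java.util.Arrays;

import org.semanticweb.rulewerk.core.model.api.Constant;
import org.semanticweb.rulewerk.core.model.api.Fact;
import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
import org.semanticweb.rulewerk.core.model.api.Rule;
import org.semanticweb.rulewerk.core.model.api.Variable;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
// assumed package of VLogReasoner, matching the test sources changed in this patch
import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner;

public class MinimalReasoningSketch {

    public static void main(final String[] args) throws IOException {
        // terms, facts and rules: A(c), A(d), B(?x) :- A(?x), C(?x) :- B(?x)
        final Constant c = Expressions.makeAbstractConstant("c");
        final Constant d = Expressions.makeAbstractConstant("d");
        final Variable x = Expressions.makeUniversalVariable("x");
        final Fact factAc = Expressions.makeFact("A", Arrays.asList(c));
        final Fact factAd = Expressions.makeFact("A", Arrays.asList(d));
        final PositiveLiteral atomAx = Expressions.makePositiveLiteral("A", x);
        final PositiveLiteral atomBx = Expressions.makePositiveLiteral("B", x);
        final PositiveLiteral atomCx = Expressions.makePositiveLiteral("C", x);
        final Rule ruleBxAx = Expressions.makeRule(atomBx, atomAx);
        final Rule ruleCxBx = Expressions.makeRule(atomCx, atomBx);

        // assemble the knowledge base and reason over it
        final KnowledgeBase kb = new KnowledgeBase();
        kb.addStatements(ruleBxAx, ruleCxBx, factAc, factAd);

        try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.load();
            reasoner.reason();
            // query C(?x); materialisation derives C(c) and C(d)
            try (final QueryResultIterator answers = reasoner.answerQuery(atomCx, true)) {
                answers.forEachRemaining(answer -> System.out.println(answer.getTerms()));
            }
        }
    }
}

Materialisation derives B(c), B(d), C(c) and C(d), so the query for C(?x) returns exactly the two tuples checked by testSimpleInference above.
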
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java index 213889d55..ecbc3ab07 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java index 601e6f57b..0d76dc569 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java index f5cb44aab..b95c1b005 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -5,9 +5,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java index a524ec507..8f85f806e 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java index dcaf16a5a..24e747cdb 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -34,16 +34,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java index db398d51d..0ee0ddcce 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java index 8c1f38594..89ad3228d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java index 5d9f00983..1ee33c9eb 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java @@ -1,139 +1,139 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; - -import static org.junit.Assert.assertArrayEquals; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Test; - -import karmaresearch.vlog.Atom; -import karmaresearch.vlog.EDBConfigurationException; -import karmaresearch.vlog.NonExistingPredicateException; -import karmaresearch.vlog.NotStartedException; -import karmaresearch.vlog.Rule; -import karmaresearch.vlog.Term; -import karmaresearch.vlog.TermQueryResultIterator; -import karmaresearch.vlog.VLog; -import karmaresearch.vlog.VLog.RuleRewriteStrategy; - -/** - * Tests that reasoning and querying with predicates of large arities is - * allowed. 
- * - * @author Irina Dragoste - * - */ -public class LargeAritiesTest { - - final static int PREDICATE_ARITY_LIMIT = 255; - final static int VARIABLES_PER_RULE_LIMIT = 255; - - @Test - public void testLargeNumberOfVariablesPerRule() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { - testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT); - } - - @Test(expected = IllegalArgumentException.class) - public void testNumberOfVariablesPerRuleExceedsLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { - testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT + 1); - } - - @Test - public void testLargePredicateArities() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { - testPredicateArity(PREDICATE_ARITY_LIMIT); - } - - @Test(expected = IllegalArgumentException.class) - public void testPredicateAritiesExceedLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { - testPredicateArity(PREDICATE_ARITY_LIMIT + 1); - } - - private void testNumberOfVariablesPerRule(int variablesPerRuleLimit) - throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { - final VLog vLog = new VLog(); - - final String[][] pFactArguments = { { "c" } }; - - final List body = new ArrayList<>(); - for (int i = 1; i <= variablesPerRuleLimit; i++) { - final String predicateName = "P" + i; - // Pi(xi) - body.add(VLogExpressions.makeAtom(predicateName, VLogExpressions.makeVariable("x" + i))); - // Pi(c) - vLog.addData(predicateName, pFactArguments); - } - final Atom head = VLogExpressions.makeAtom("q", VLogExpressions.makeVariable("x1")); - - // q(x1) :- P1(x1),...,Pn(xn) - final Rule rule = VLogExpressions.makeRule(head, body.toArray(new Atom[body.size()])); - - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - try (final TermQueryResultIterator queryResultIterator = vLog.query(head, true, false)) { - assertTrue(queryResultIterator.hasNext()); - final Term[] queryResult = queryResultIterator.next(); - assertArrayEquals(new Term[] { VLogExpressions.makeConstant("c") }, queryResult); - - assertFalse(queryResultIterator.hasNext()); - } - vLog.stop(); - } - - private void testPredicateArity(final int predicateArityLimit) - throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { - final List constants = new ArrayList<>(); - for (int i = 0; i < predicateArityLimit; i++) { - constants.add("c" + i); - } - final String[][] pFactArguments = { constants.toArray(new String[predicateArityLimit]) }; - - final List variables = new ArrayList<>(); - for (int i = 0; i < predicateArityLimit; i++) { - variables.add(VLogExpressions.makeVariable("x" + i)); - } - - final Term[] terms = variables.toArray(new Term[variables.size()]); - final Rule rule = VLogExpressions.makeRule(VLogExpressions.makeAtom("q", terms), - VLogExpressions.makeAtom("p", terms)); - final Atom queryAtomQPredicate = VLogExpressions.makeAtom("q", terms); - - final VLog vLog = new VLog(); - vLog.addData("p", pFactArguments); - - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - try (final TermQueryResultIterator queryResultIterator = vLog.query(queryAtomQPredicate, true, false)) { - assertTrue(queryResultIterator.hasNext()); - final Term[] queryResult = queryResultIterator.next(); - assertTrue(queryResult.length == predicateArityLimit); - - assertFalse(queryResultIterator.hasNext()); - } - 
vLog.stop(); - } - -} +package org.semanticweb.rulewerk.core.reasoner.vlog; + +import static org.junit.Assert.assertArrayEquals; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.Test; + +import karmaresearch.vlog.Atom; +import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.Rule; +import karmaresearch.vlog.Term; +import karmaresearch.vlog.TermQueryResultIterator; +import karmaresearch.vlog.VLog; +import karmaresearch.vlog.VLog.RuleRewriteStrategy; + +/** + * Tests that reasoning and querying with predicates of large arities is + * allowed. + * + * @author Irina Dragoste + * + */ +public class LargeAritiesTest { + + final static int PREDICATE_ARITY_LIMIT = 255; + final static int VARIABLES_PER_RULE_LIMIT = 255; + + @Test + public void testLargeNumberOfVariablesPerRule() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { + testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT); + } + + @Test(expected = IllegalArgumentException.class) + public void testNumberOfVariablesPerRuleExceedsLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { + testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT + 1); + } + + @Test + public void testLargePredicateArities() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { + testPredicateArity(PREDICATE_ARITY_LIMIT); + } + + @Test(expected = IllegalArgumentException.class) + public void testPredicateAritiesExceedLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { + testPredicateArity(PREDICATE_ARITY_LIMIT + 1); + } + + private void testNumberOfVariablesPerRule(int variablesPerRuleLimit) + throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { + final VLog vLog = new VLog(); + + final String[][] pFactArguments = { { "c" } }; + + final List body = new ArrayList<>(); + for (int i = 1; i <= variablesPerRuleLimit; i++) { + final String predicateName = "P" + i; + // Pi(xi) + body.add(VLogExpressions.makeAtom(predicateName, VLogExpressions.makeVariable("x" + i))); + // Pi(c) + vLog.addData(predicateName, pFactArguments); + } + final Atom head = VLogExpressions.makeAtom("q", VLogExpressions.makeVariable("x1")); + + // q(x1) :- P1(x1),...,Pn(xn) + final Rule rule = VLogExpressions.makeRule(head, body.toArray(new Atom[body.size()])); + + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + try (final TermQueryResultIterator queryResultIterator = vLog.query(head, true, false)) { + assertTrue(queryResultIterator.hasNext()); + final Term[] 
queryResult = queryResultIterator.next(); + assertArrayEquals(new Term[] { VLogExpressions.makeConstant("c") }, queryResult); + + assertFalse(queryResultIterator.hasNext()); + } + vLog.stop(); + } + + private void testPredicateArity(final int predicateArityLimit) + throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { + final List constants = new ArrayList<>(); + for (int i = 0; i < predicateArityLimit; i++) { + constants.add("c" + i); + } + final String[][] pFactArguments = { constants.toArray(new String[predicateArityLimit]) }; + + final List variables = new ArrayList<>(); + for (int i = 0; i < predicateArityLimit; i++) { + variables.add(VLogExpressions.makeVariable("x" + i)); + } + + final Term[] terms = variables.toArray(new Term[variables.size()]); + final Rule rule = VLogExpressions.makeRule(VLogExpressions.makeAtom("q", terms), + VLogExpressions.makeAtom("p", terms)); + final Atom queryAtomQPredicate = VLogExpressions.makeAtom("q", terms); + + final VLog vLog = new VLog(); + vLog.addData("p", pFactArguments); + + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + try (final TermQueryResultIterator queryResultIterator = vLog.query(queryAtomQPredicate, true, false)) { + assertTrue(queryResultIterator.hasNext()); + final Term[] queryResult = queryResultIterator.next(); + assertTrue(queryResult.length == predicateArityLimit); + + assertFalse(queryResultIterator.hasNext()); + } + vLog.stop(); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java index b4b89ae14..ee9041fda 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java index 676dd79dd..afc81d080 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
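
LargeAritiesTest above documents two limits of the underlying VLog engine, at most 255 variables per rule and at most 255 columns per predicate, and shows that exceeding either limit by one raises IllegalArgumentException. The sketch below is a compact variant of testPredicateArity under the same assumptions: it is placed in the same test package so that it can reuse the VLogExpressions helper shipped with these test sources, and the class name MaxAritySketch is illustrative only.

package org.semanticweb.rulewerk.core.reasoner.vlog; // same package as the tests, so VLogExpressions is visible

import java.util.ArrayList;
import java.util.List;

import karmaresearch.vlog.Atom;
import karmaresearch.vlog.Rule;
import karmaresearch.vlog.Term;
import karmaresearch.vlog.TermQueryResultIterator;
import karmaresearch.vlog.VLog;
import karmaresearch.vlog.VLog.RuleRewriteStrategy;

public class MaxAritySketch {

    public static void main(final String[] args) throws Exception {
        final int arity = 255; // documented limit; one more column makes VLog throw IllegalArgumentException

        // one fact p(c0,...,c254) and one rule q(x0,...,x254) :- p(x0,...,x254)
        final List<String> constants = new ArrayList<>();
        final List<Term> variables = new ArrayList<>();
        for (int i = 0; i < arity; i++) {
            constants.add("c" + i);
            variables.add(VLogExpressions.makeVariable("x" + i));
        }
        final Term[] terms = variables.toArray(new Term[0]);
        final Atom head = VLogExpressions.makeAtom("q", terms);
        final Atom body = VLogExpressions.makeAtom("p", terms);
        final Rule rule = VLogExpressions.makeRule(head, body);

        final VLog vLog = new VLog();
        vLog.addData("p", new String[][] { constants.toArray(new String[0]) });
        vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE);
        vLog.materialize(true);

        // the single answer to q(x0,...,x254) carries all 255 constants
        try (final TermQueryResultIterator answers = vLog.query(head, true, false)) {
            while (answers.hasNext()) {
                System.out.println(answers.next().length); // prints 255
            }
        }
        vLog.stop();
    }
}
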
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java index 1d9b7ca0d..fb2882349 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java @@ -1,248 +1,248 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.commons.lang3.StringUtils; -import org.junit.Assert; -import org.junit.Test; - -import karmaresearch.vlog.AlreadyStartedException; -import karmaresearch.vlog.EDBConfigurationException; -import karmaresearch.vlog.NonExistingPredicateException; -import karmaresearch.vlog.NotStartedException; -import karmaresearch.vlog.Rule; -import karmaresearch.vlog.Term; -import karmaresearch.vlog.TermQueryResultIterator; -import karmaresearch.vlog.VLog; -import karmaresearch.vlog.VLog.RuleRewriteStrategy; - -/** - * Tests VLog functionality when data (facts) is loaded exclusively from memory. 
- * - * @author Irina.Dragoste - * - */ -public class VLogDataFromMemoryTest { - - @Test - public void testVLogSimpleInference() throws AlreadyStartedException, EDBConfigurationException, IOException, - NotStartedException, NonExistingPredicateException { - - final String[][] argsAMatrix = { { "a" }, { "b" } }; - final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("x"); - final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); - final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX); - final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); - // tuples: [[a], [b]] - final Set> tuples = new HashSet<>(); - tuples.add(Arrays.asList(VLogExpressions.makeConstant("a"))); - tuples.add(Arrays.asList(VLogExpressions.makeConstant("b"))); - - // Start VLog - final VLog vLog = new VLog(); - vLog.addData("A", argsAMatrix); // Assert A(a), A(b) - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - - // Querying A(?X) before materialize - final TermQueryResultIterator queryResultIteratorAx1 = vLog.query(atomAx); - final Set> queryAxResults1 = VLogQueryResultUtils.collectResults(queryResultIteratorAx1); - assertEquals(tuples, queryAxResults1); - - // Querying B(?X) before materialize - final TermQueryResultIterator queryResultIteratorBx1 = vLog.query(atomBx); - assertFalse(queryResultIteratorBx1.hasNext()); - queryResultIteratorBx1.close(); - - vLog.materialize(true); - - // Querying B(?X) after materialize - final TermQueryResultIterator queryResultIteratorBx2 = vLog.query(atomBx); - final Set> queryResultsBx = VLogQueryResultUtils.collectResults(queryResultIteratorBx2); - assertEquals(tuples, queryResultsBx); - - final TermQueryResultIterator queryResultIteratorAx2 = vLog.query(atomAx); - final Set> queryAxResults2 = VLogQueryResultUtils.collectResults(queryResultIteratorAx2); - assertEquals(tuples, queryAxResults2); - - vLog.stop(); - } - - @Test - public void testBooleanQueryTrueIncludeConstantsFalse() throws AlreadyStartedException, EDBConfigurationException, - IOException, NotStartedException, NonExistingPredicateException { - // Creating rules and facts - final String[][] argsAMatrix = { { "a", "a" } }; - final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); - final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y"); - final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY); - final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY); - final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); - - // Start VLog - final VLog vLog = new VLog(); - vLog.addData("A", argsAMatrix); - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - - // Querying B(a) - final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); - final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); - - final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); - assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); - final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); - final Term[] expectedQueryResult = { constantA, constantA }; - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); - assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); - defaultIteratorWithConstantsAndBlanks.close(); - - final TermQueryResultIterator iteratorNoConstantsNoBlanks = 
vLog.query(booleanQueryAtomBa, false, false); - assertTrue(iteratorNoConstantsNoBlanks.hasNext()); - assertTrue(iteratorNoConstantsNoBlanks.next().length == 0); - iteratorNoConstantsNoBlanks.close(); - - final TermQueryResultIterator iteratorNoConstantsWithBlanks = vLog.query(booleanQueryAtomBa, false, true); - assertTrue(iteratorNoConstantsWithBlanks.hasNext()); - Assert.assertTrue(iteratorNoConstantsWithBlanks.next().length == 0); - assertFalse(iteratorNoConstantsWithBlanks.hasNext()); - iteratorNoConstantsWithBlanks.close(); - - vLog.stop(); - } - - @Test - public void testBooleanQueryTrueIncludeConstantsTrue() throws AlreadyStartedException, EDBConfigurationException, - IOException, NotStartedException, NonExistingPredicateException { - // Creating rules and facts - final String[][] argsAMatrix = { { "a", "a" } }; - final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); - final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y"); - final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY); - final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY); - final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); // A(x,x) -> B(x,x) - - // Start VLog - final VLog vLog = new VLog(); - vLog.addData("A", argsAMatrix); // assert A(a,a) - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - - // Querying B(a) - final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); - final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); - - final Term[] expectedQueryResult = { constantA, constantA }; - - final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); - assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); - final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); - assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); - defaultIteratorWithConstantsAndBlanks.close(); - - final TermQueryResultIterator iteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa, true, false); - assertTrue(iteratorWithConstantsAndBlanks.hasNext()); - final Term[] actualQueryResult3 = iteratorWithConstantsAndBlanks.next(); - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult3); - assertFalse(iteratorWithConstantsAndBlanks.hasNext()); - iteratorWithConstantsAndBlanks.close(); - - final TermQueryResultIterator iteratorWithConstantsNoBlanks = vLog.query(booleanQueryAtomBa, true, true); - assertTrue(iteratorWithConstantsNoBlanks.hasNext()); - final Term[] actualQueryResult2 = iteratorWithConstantsNoBlanks.next(); - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult2); - assertFalse(iteratorWithConstantsNoBlanks.hasNext()); - iteratorWithConstantsNoBlanks.close(); - - vLog.stop(); - } - - @Test - public void testBooleanQueryFalse() throws AlreadyStartedException, EDBConfigurationException, IOException, - NotStartedException, NonExistingPredicateException { - final String[][] argsAMatrix = { { "a" } }; - final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); - final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); - final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX); - final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); - - // Start VLog - final VLog vLog = new VLog(); - 
vLog.addData("A", argsAMatrix); - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - - // Querying B(a) - final karmaresearch.vlog.Term constantB = VLogExpressions.makeConstant("b"); - final karmaresearch.vlog.Atom booleanQueryAtomBb = new karmaresearch.vlog.Atom("B", constantB); - - final TermQueryResultIterator queryResultEnnumeration = vLog.query(booleanQueryAtomBb); - assertFalse(queryResultEnnumeration.hasNext()); - - queryResultEnnumeration.close(); - vLog.stop(); - } - - @Test(expected = NonExistingPredicateException.class) - public void queryEmptyKnowledgeBaseBeforeReasoning() throws NotStartedException, AlreadyStartedException, - EDBConfigurationException, IOException, NonExistingPredicateException { - // Start VLog - final VLog vLog = new VLog(); - try { - vLog.start(StringUtils.EMPTY, false); - - final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", - VLogExpressions.makeVariable("?x")); - - vLog.query(queryAtom); - } finally { - vLog.stop(); - } - } - - @Test(expected = NonExistingPredicateException.class) - public void queryEmptyKnowledgeBaseAfterReasoning() throws NotStartedException, AlreadyStartedException, - EDBConfigurationException, IOException, NonExistingPredicateException { - // Start VLog - final VLog vLog = new VLog(); - try { - vLog.start(StringUtils.EMPTY, false); - vLog.materialize(true); - - final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", - VLogExpressions.makeVariable("?x")); - - vLog.query(queryAtom); - } finally { - vLog.stop(); - } - } - -} \ No newline at end of file +package org.semanticweb.rulewerk.core.reasoner.vlog; + +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.apache.commons.lang3.StringUtils; +import org.junit.Assert; +import org.junit.Test; + +import karmaresearch.vlog.AlreadyStartedException; +import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.Rule; +import karmaresearch.vlog.Term; +import karmaresearch.vlog.TermQueryResultIterator; +import karmaresearch.vlog.VLog; +import karmaresearch.vlog.VLog.RuleRewriteStrategy; + +/** + * Tests VLog functionality when data (facts) is loaded exclusively from memory. 
+ * + * @author Irina.Dragoste + * + */ +public class VLogDataFromMemoryTest { + + @Test + public void testVLogSimpleInference() throws AlreadyStartedException, EDBConfigurationException, IOException, + NotStartedException, NonExistingPredicateException { + + final String[][] argsAMatrix = { { "a" }, { "b" } }; + final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("x"); + final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); + final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX); + final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); + // tuples: [[a], [b]] + final Set> tuples = new HashSet<>(); + tuples.add(Arrays.asList(VLogExpressions.makeConstant("a"))); + tuples.add(Arrays.asList(VLogExpressions.makeConstant("b"))); + + // Start VLog + final VLog vLog = new VLog(); + vLog.addData("A", argsAMatrix); // Assert A(a), A(b) + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + + // Querying A(?X) before materialize + final TermQueryResultIterator queryResultIteratorAx1 = vLog.query(atomAx); + final Set> queryAxResults1 = VLogQueryResultUtils.collectResults(queryResultIteratorAx1); + assertEquals(tuples, queryAxResults1); + + // Querying B(?X) before materialize + final TermQueryResultIterator queryResultIteratorBx1 = vLog.query(atomBx); + assertFalse(queryResultIteratorBx1.hasNext()); + queryResultIteratorBx1.close(); + + vLog.materialize(true); + + // Querying B(?X) after materialize + final TermQueryResultIterator queryResultIteratorBx2 = vLog.query(atomBx); + final Set> queryResultsBx = VLogQueryResultUtils.collectResults(queryResultIteratorBx2); + assertEquals(tuples, queryResultsBx); + + final TermQueryResultIterator queryResultIteratorAx2 = vLog.query(atomAx); + final Set> queryAxResults2 = VLogQueryResultUtils.collectResults(queryResultIteratorAx2); + assertEquals(tuples, queryAxResults2); + + vLog.stop(); + } + + @Test + public void testBooleanQueryTrueIncludeConstantsFalse() throws AlreadyStartedException, EDBConfigurationException, + IOException, NotStartedException, NonExistingPredicateException { + // Creating rules and facts + final String[][] argsAMatrix = { { "a", "a" } }; + final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); + final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y"); + final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY); + final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY); + final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); + + // Start VLog + final VLog vLog = new VLog(); + vLog.addData("A", argsAMatrix); + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + + // Querying B(a) + final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); + final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); + + final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); + assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); + final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); + final Term[] expectedQueryResult = { constantA, constantA }; + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); + assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); + defaultIteratorWithConstantsAndBlanks.close(); + + final TermQueryResultIterator iteratorNoConstantsNoBlanks = 
vLog.query(booleanQueryAtomBa, false, false); + assertTrue(iteratorNoConstantsNoBlanks.hasNext()); + assertTrue(iteratorNoConstantsNoBlanks.next().length == 0); + iteratorNoConstantsNoBlanks.close(); + + final TermQueryResultIterator iteratorNoConstantsWithBlanks = vLog.query(booleanQueryAtomBa, false, true); + assertTrue(iteratorNoConstantsWithBlanks.hasNext()); + Assert.assertTrue(iteratorNoConstantsWithBlanks.next().length == 0); + assertFalse(iteratorNoConstantsWithBlanks.hasNext()); + iteratorNoConstantsWithBlanks.close(); + + vLog.stop(); + } + + @Test + public void testBooleanQueryTrueIncludeConstantsTrue() throws AlreadyStartedException, EDBConfigurationException, + IOException, NotStartedException, NonExistingPredicateException { + // Creating rules and facts + final String[][] argsAMatrix = { { "a", "a" } }; + final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); + final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y"); + final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY); + final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY); + final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); // A(x,x) -> B(x,x) + + // Start VLog + final VLog vLog = new VLog(); + vLog.addData("A", argsAMatrix); // assert A(a,a) + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + + // Querying B(a) + final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); + final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); + + final Term[] expectedQueryResult = { constantA, constantA }; + + final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); + assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); + final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); + assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); + defaultIteratorWithConstantsAndBlanks.close(); + + final TermQueryResultIterator iteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa, true, false); + assertTrue(iteratorWithConstantsAndBlanks.hasNext()); + final Term[] actualQueryResult3 = iteratorWithConstantsAndBlanks.next(); + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult3); + assertFalse(iteratorWithConstantsAndBlanks.hasNext()); + iteratorWithConstantsAndBlanks.close(); + + final TermQueryResultIterator iteratorWithConstantsNoBlanks = vLog.query(booleanQueryAtomBa, true, true); + assertTrue(iteratorWithConstantsNoBlanks.hasNext()); + final Term[] actualQueryResult2 = iteratorWithConstantsNoBlanks.next(); + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult2); + assertFalse(iteratorWithConstantsNoBlanks.hasNext()); + iteratorWithConstantsNoBlanks.close(); + + vLog.stop(); + } + + @Test + public void testBooleanQueryFalse() throws AlreadyStartedException, EDBConfigurationException, IOException, + NotStartedException, NonExistingPredicateException { + final String[][] argsAMatrix = { { "a" } }; + final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); + final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); + final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX); + final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); + + // Start VLog + final VLog vLog = new VLog(); + 
vLog.addData("A", argsAMatrix); + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + + // Querying B(a) + final karmaresearch.vlog.Term constantB = VLogExpressions.makeConstant("b"); + final karmaresearch.vlog.Atom booleanQueryAtomBb = new karmaresearch.vlog.Atom("B", constantB); + + final TermQueryResultIterator queryResultEnnumeration = vLog.query(booleanQueryAtomBb); + assertFalse(queryResultEnnumeration.hasNext()); + + queryResultEnnumeration.close(); + vLog.stop(); + } + + @Test(expected = NonExistingPredicateException.class) + public void queryEmptyKnowledgeBaseBeforeReasoning() throws NotStartedException, AlreadyStartedException, + EDBConfigurationException, IOException, NonExistingPredicateException { + // Start VLog + final VLog vLog = new VLog(); + try { + vLog.start(StringUtils.EMPTY, false); + + final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", + VLogExpressions.makeVariable("?x")); + + vLog.query(queryAtom); + } finally { + vLog.stop(); + } + } + + @Test(expected = NonExistingPredicateException.class) + public void queryEmptyKnowledgeBaseAfterReasoning() throws NotStartedException, AlreadyStartedException, + EDBConfigurationException, IOException, NonExistingPredicateException { + // Start VLog + final VLog vLog = new VLog(); + try { + vLog.start(StringUtils.EMPTY, false); + vLog.materialize(true); + + final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", + VLogExpressions.makeVariable("?x")); + + vLog.query(queryAtom); + } finally { + vLog.stop(); + } + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java index 622888073..a16b34c89 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java index d03bf4fac..d87a36190 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
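
VLogDataFromMemoryTest above also shows how a ground ("boolean") query behaves under the two boolean flags of VLog.query: with the default call the single answer repeats the query constants, while passing false as the first flag filters constants out of the answer and yields an empty tuple, as exercised by testBooleanQueryTrueIncludeConstantsFalse. A minimal sketch of that behaviour follows, again relying on the VLogExpressions test helper from this package; the class name BooleanQuerySketch is illustrative only.

package org.semanticweb.rulewerk.core.reasoner.vlog; // same package as the tests, so VLogExpressions is visible

import java.util.Arrays;

import karmaresearch.vlog.Atom;
import karmaresearch.vlog.Rule;
import karmaresearch.vlog.Term;
import karmaresearch.vlog.TermQueryResultIterator;
import karmaresearch.vlog.VLog;
import karmaresearch.vlog.VLog.RuleRewriteStrategy;

public class BooleanQuerySketch {

    public static void main(final String[] args) throws Exception {
        // one fact A(a) and one rule B(x) :- A(x)
        final Term varX = VLogExpressions.makeVariable("x");
        final Atom atomAx = new Atom("A", varX);
        final Atom atomBx = new Atom("B", varX);
        final Rule rule = VLogExpressions.makeRule(atomBx, atomAx);

        final VLog vLog = new VLog();
        vLog.addData("A", new String[][] { { "a" } });
        vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE);
        vLog.materialize(true);

        // ground query B(a): an answer exists iff the fact was derived
        final Term constantA = VLogExpressions.makeConstant("a");
        final Atom queryBa = new Atom("B", constantA);

        // default call: the answer repeats the query constant, i.e. [a]
        try (final TermQueryResultIterator it = vLog.query(queryBa)) {
            if (it.hasNext()) {
                System.out.println(Arrays.toString(it.next()));
            }
        }

        // first flag false: constants are omitted, so the answer tuple is empty
        try (final TermQueryResultIterator it = vLog.query(queryBa, false, false)) {
            if (it.hasNext()) {
                System.out.println(it.next().length); // prints 0
            }
        }

        vLog.stop();
    }
}
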
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java index e61d46421..a23dae441 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java index df38a52ab..57ca22e3f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java index 428e31226..ed85ea768 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java index 838a261f5..85f9e8b3b 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java index fede387b0..b44afdbf8 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -78,4 +78,4 @@ public static void main(final String[] args) throws IOException, ParsingExceptio } -} \ No newline at end of file +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java index 2f895847e..b80163fb4 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index dafb60680..f379970ba 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java index 6ea419e31..0647c1edc 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java index 9591dd3f0..89cadd193 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java index 9fcb968a3..0b76c4bb3 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java index 8e974a814..2aa2c02a2 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java index 6f3a92d3f..8eb7a2a8f 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java @@ -1,207 +1,207 @@ -package org.semanticweb.rulewerk.examples.core; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; -import java.net.URL; -import java.util.Arrays; -import java.util.LinkedHashSet; -import java.util.List; - -import org.semanticweb.rulewerk.core.model.api.Conjunction; -import org.semanticweb.rulewerk.core.model.api.DataSource; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.rulewerk.examples.ExamplesUtils; - -/** - * This is a simple example of adding data from the result of a SPARQL query on - * a remote database endpoint, using {@link SparqlQueryResultDataSource}. In - * this example, we will query Wikidata for titles of publications that have - * authors who have children together. - * - * @author Irina Dragoste - * - */ -public class AddDataFromSparqlQueryResults { - - /** - * WikiData author - * property id. - */ - private static final String WIKIDATA_AUTHOR_PROPERTY = "wdt:P50"; - /** - * WikiData title - * property id. Published title of a work, such as a newspaper article, a - * literary work, a website, or a performance work - */ - private static final String WIKIDATA_TITLE_PROPERTY = "wdt:P1476"; - /** - * WikiData mother - * property id. - */ - private static final String WIKIDATA_MOTHER_PROPERTY = "wdt:P25"; - /** - * WikiData father - * property id. - */ - private static final String WIKIDATA_FATHER_PROPERTY = "wdt:P22"; - - public static void main(final String[] args) throws IOException { - - ExamplesUtils.configureLogging(); - - /* - * The WikiData SPARQL query endpoint. 
- */ - final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); - - /* - * SPARQL query body that looks for publications where two authors of the - * publication are the mother, respectively father of the same child. - */ - final String queryBody = " ?publication " + WIKIDATA_TITLE_PROPERTY + " ?title ." + "?publication " - + WIKIDATA_AUTHOR_PROPERTY + " ?mother ." + " ?publication " + WIKIDATA_AUTHOR_PROPERTY + " ?father ." - + " ?child " + WIKIDATA_MOTHER_PROPERTY + " ?mother ." + " ?child " + WIKIDATA_FATHER_PROPERTY - + " ?father ."; - - final Variable titleVariable = Expressions.makeUniversalVariable("title"); - final Variable motherVariable = Expressions.makeUniversalVariable("mother"); - final Variable fatherVariable = Expressions.makeUniversalVariable("father"); - - /* - * The query variables are the variables from the query body which will appear - * in the query result, in the given order. Fact resulting from this query will - * have as terms the title of the publication, the mother publication author and - * the father publication author. - */ - final LinkedHashSet queryVariables = new LinkedHashSet<>( - Arrays.asList(titleVariable, motherVariable, fatherVariable)); - - /* - * We query Wikidata with the SPARQL query composed of the query variables and - * query body. The query result is a DataSource we will associate to a - * predicate. - */ - final DataSource sparqlQueryResultDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, - queryVariables, queryBody); - - /* - * Predicate that will be mapped to the SPARQL query result. It must have the - * same arity as the query variables size. In this case, we have 3 query - * variables (title, mother and father). - */ - final Predicate queryPredicate = Expressions.makePredicate("publicationParents", 3); - - try (Reasoner reasoner = Reasoner.getInstance()) { - - final KnowledgeBase kb = reasoner.getKnowledgeBase(); - /* - * The SPARQL query results will be added to the reasoner knowledge base, as - * facts associated to the predicate publicationParents. - */ - - kb.addStatement(new DataSourceDeclarationImpl(queryPredicate, sparqlQueryResultDataSource)); - reasoner.reason(); - - /* - * We construct a query PositiveLiteral for the predicated associated to the - * SPARQL query result. - */ - final PositiveLiteral query = Expressions.makePositiveLiteral(queryPredicate, Expressions.makeUniversalVariable("x"), - Expressions.makeUniversalVariable("y"), Expressions.makeUniversalVariable("z")); - - /* We query the reasoner for facts of the SPARQL query result predicate. */ - System.out.println("Titles of publications by co-authors who have a child together:"); - try (QueryResultIterator queryResultIterator = reasoner.answerQuery(query, false)) { - queryResultIterator.forEachRemaining(queryResult -> { - final List queryResultTerms = queryResult.getTerms(); - - System.out.println("- title: " + queryResultTerms.get(0) + ", mother author: " - + queryResultTerms.get(1) + ", father author: " + queryResultTerms.get(2)); - }); - } - - /* - * To do some basic reasoning, we would now like to add the following rule that - * extracts (unique) mothers, fathers, and pairs from the queried data: - * haveChildrenTogether(?y, ?z), isMother(?y), isFather(?z) :- - * publicationParents(?x, ?y, ?z) . 
- */ - final PositiveLiteral haveChildrenTogether = Expressions.makePositiveLiteral("haveChildrenTogether", - Expressions.makeUniversalVariable("y"), Expressions.makeUniversalVariable("z")); - final PositiveLiteral isMother = Expressions.makePositiveLiteral("isMother", Expressions.makeUniversalVariable("y")); - final PositiveLiteral isFather = Expressions.makePositiveLiteral("isFather", Expressions.makeUniversalVariable("z")); - final Conjunction ruleHeadConjunction = Expressions - .makePositiveConjunction(haveChildrenTogether, isMother, isFather); - final Rule rule = Expressions.makeRule(ruleHeadConjunction, Expressions.makeConjunction(query)); - - /* - * We add the created rule, and reason on the data added from the Wikidata - * SPARQL query result. - */ - kb.addStatement(rule); - reasoner.reason(); - - /* We query the reasoner for facts of the haveChildrenTogether predicate. */ - System.out.println("Co-authors who have a child:"); - try (QueryResultIterator queryResultIterator = reasoner.answerQuery(haveChildrenTogether, false)) { - queryResultIterator.forEachRemaining(queryResult -> { - final List queryResultTerms = queryResult.getTerms(); - - System.out - .println("- author1: " + queryResultTerms.get(0) + ", author2: " + queryResultTerms.get(1)); - }); - } - - /* We query the reasoner for facts of the isMother predicate. */ - System.out.println("Mothers:"); - try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isMother, false)) { - queryResultIterator.forEachRemaining(queryResult -> { - final List queryResultTerms = queryResult.getTerms(); - - System.out.println("- mother: " + queryResultTerms.get(0)); - }); - } - - /* We query the reasoner for facts of the isFather predicate. */ - System.out.println("Fathers:"); - try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isFather, false)) { - queryResultIterator.forEachRemaining(queryResult -> { - final List queryResultTerms = queryResult.getTerms(); - - System.out.println("- father: " + queryResultTerms.get(0)); - }); - } - - } - } - -} +package org.semanticweb.rulewerk.examples.core; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; +import java.net.URL; +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.examples.ExamplesUtils; + +/** + * This is a simple example of adding data from the result of a SPARQL query on + * a remote database endpoint, using {@link SparqlQueryResultDataSource}. In + * this example, we will query Wikidata for titles of publications that have + * authors who have children together. + * + * @author Irina Dragoste + * + */ +public class AddDataFromSparqlQueryResults { + + /** + * WikiData author + * property id. + */ + private static final String WIKIDATA_AUTHOR_PROPERTY = "wdt:P50"; + /** + * WikiData title + * property id. Published title of a work, such as a newspaper article, a + * literary work, a website, or a performance work + */ + private static final String WIKIDATA_TITLE_PROPERTY = "wdt:P1476"; + /** + * WikiData mother + * property id. + */ + private static final String WIKIDATA_MOTHER_PROPERTY = "wdt:P25"; + /** + * WikiData father + * property id. + */ + private static final String WIKIDATA_FATHER_PROPERTY = "wdt:P22"; + + public static void main(final String[] args) throws IOException { + + ExamplesUtils.configureLogging(); + + /* + * The WikiData SPARQL query endpoint. + */ + final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); + + /* + * SPARQL query body that looks for publications where two authors of the + * publication are the mother, respectively father of the same child. + */ + final String queryBody = " ?publication " + WIKIDATA_TITLE_PROPERTY + " ?title ." + "?publication " + + WIKIDATA_AUTHOR_PROPERTY + " ?mother ." + " ?publication " + WIKIDATA_AUTHOR_PROPERTY + " ?father ." + + " ?child " + WIKIDATA_MOTHER_PROPERTY + " ?mother ." + " ?child " + WIKIDATA_FATHER_PROPERTY + + " ?father ."; + + final Variable titleVariable = Expressions.makeUniversalVariable("title"); + final Variable motherVariable = Expressions.makeUniversalVariable("mother"); + final Variable fatherVariable = Expressions.makeUniversalVariable("father"); + + /* + * The query variables are the variables from the query body which will appear + * in the query result, in the given order. Fact resulting from this query will + * have as terms the title of the publication, the mother publication author and + * the father publication author. + */ + final LinkedHashSet queryVariables = new LinkedHashSet<>( + Arrays.asList(titleVariable, motherVariable, fatherVariable)); + + /* + * We query Wikidata with the SPARQL query composed of the query variables and + * query body. The query result is a DataSource we will associate to a + * predicate. 
+ */ + final DataSource sparqlQueryResultDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, + queryVariables, queryBody); + + /* + * Predicate that will be mapped to the SPARQL query result. It must have the + * same arity as the query variables size. In this case, we have 3 query + * variables (title, mother and father). + */ + final Predicate queryPredicate = Expressions.makePredicate("publicationParents", 3); + + try (Reasoner reasoner = Reasoner.getInstance()) { + + final KnowledgeBase kb = reasoner.getKnowledgeBase(); + /* + * The SPARQL query results will be added to the reasoner knowledge base, as + * facts associated to the predicate publicationParents. + */ + + kb.addStatement(new DataSourceDeclarationImpl(queryPredicate, sparqlQueryResultDataSource)); + reasoner.reason(); + + /* + * We construct a query PositiveLiteral for the predicated associated to the + * SPARQL query result. + */ + final PositiveLiteral query = Expressions.makePositiveLiteral(queryPredicate, Expressions.makeUniversalVariable("x"), + Expressions.makeUniversalVariable("y"), Expressions.makeUniversalVariable("z")); + + /* We query the reasoner for facts of the SPARQL query result predicate. */ + System.out.println("Titles of publications by co-authors who have a child together:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(query, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + final List queryResultTerms = queryResult.getTerms(); + + System.out.println("- title: " + queryResultTerms.get(0) + ", mother author: " + + queryResultTerms.get(1) + ", father author: " + queryResultTerms.get(2)); + }); + } + + /* + * To do some basic reasoning, we would now like to add the following rule that + * extracts (unique) mothers, fathers, and pairs from the queried data: + * haveChildrenTogether(?y, ?z), isMother(?y), isFather(?z) :- + * publicationParents(?x, ?y, ?z) . + */ + final PositiveLiteral haveChildrenTogether = Expressions.makePositiveLiteral("haveChildrenTogether", + Expressions.makeUniversalVariable("y"), Expressions.makeUniversalVariable("z")); + final PositiveLiteral isMother = Expressions.makePositiveLiteral("isMother", Expressions.makeUniversalVariable("y")); + final PositiveLiteral isFather = Expressions.makePositiveLiteral("isFather", Expressions.makeUniversalVariable("z")); + final Conjunction ruleHeadConjunction = Expressions + .makePositiveConjunction(haveChildrenTogether, isMother, isFather); + final Rule rule = Expressions.makeRule(ruleHeadConjunction, Expressions.makeConjunction(query)); + + /* + * We add the created rule, and reason on the data added from the Wikidata + * SPARQL query result. + */ + kb.addStatement(rule); + reasoner.reason(); + + /* We query the reasoner for facts of the haveChildrenTogether predicate. */ + System.out.println("Co-authors who have a child:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(haveChildrenTogether, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + final List queryResultTerms = queryResult.getTerms(); + + System.out + .println("- author1: " + queryResultTerms.get(0) + ", author2: " + queryResultTerms.get(1)); + }); + } + + /* We query the reasoner for facts of the isMother predicate. 
*/ + System.out.println("Mothers:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isMother, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + final List queryResultTerms = queryResult.getTerms(); + + System.out.println("- mother: " + queryResultTerms.get(0)); + }); + } + + /* We query the reasoner for facts of the isFather predicate. */ + System.out.println("Fathers:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isFather, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + final List queryResultTerms = queryResult.getTerms(); + + System.out.println("- father: " + queryResultTerms.get(0)); + }); + } + + } + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java index fdf14be9c..b29262a49 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java index 387893b9d..ae4042817 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java @@ -1,136 +1,136 @@ -package org.semanticweb.rulewerk.examples.core; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; - -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.reasoner.Algorithm; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.rulewerk.examples.ExamplesUtils; -import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.RuleParser; - -/** - * This example shows non-termination of the Skolem Chase, versus termination of - * the Restricted Chase on the same set of rules and facts. Note that the - * Restricted Chase is the default reasoning algorithm, as it terminates in most - * cases and generates a smaller number of facts. 
- * - * @author Irina Dragoste - * - */ -public class SkolemVsRestrictedChaseTermination { - - public static void main(final String[] args) throws IOException, ParsingException { - - ExamplesUtils.configureLogging(); - - final String facts = ""// define some facts: - + "bicycle(bicycle1) ." // - + "hasPart(bicycle1, wheel1) ." // - + "wheel(wheel1) ." // - + "bicycle(bicycle2) ."; - - final String rules = "" - // every bicycle has some part that is a wheel: - + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." // - // every wheel is part of some bicycle: - + "isPartOf(?X, !Y), bicycle(!Y) :- wheel(?X) ." // - // hasPart and isPartOf are mutually inverse relations: - + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." // - + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; - - /* - * 1. Load facts into a knowledge base - */ - final KnowledgeBase kb = RuleParser.parse(facts); - - /* - * 2. Load the knowledge base into the reasoner - */ - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - - /* - * 3. Query the reasoner before applying rules for fact materialisation - */ - final PositiveLiteral queryHasPart = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); - - /* See that there is no fact HasPartIDB before reasoning. */ - System.out.println("Before reasoning is started, no inferrences have been computed yet."); - ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); - - /* - * 4. Load rules into the knowledge base - */ - RuleParser.parseInto(kb, rules); - /* - * 5. Materialise with the Skolem Chase. As the Skolem Chase is known not to - * terminate for this set of rules and facts, it is interrupted after one - * second. - */ - reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - reasoner.setReasoningTimeout(1); - System.out.println("Starting Skolem Chase (a.k.a. semi-oblivious chase) with 1 second timeout ..."); - final boolean skolemChaseFinished = reasoner.reason(); - - /* Verify that the Skolem Chase did not terminate before timeout. */ - System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? " + skolemChaseFinished); - /* - * See that the Skolem Chase generated a very large number of facts in 1 second, - * extensively introducing new unnamed individuals to satisfy existential - * restrictions. - */ - System.out.println("Before the timeout, the Skolem chase had produced " - + reasoner.countQueryAnswers(queryHasPart).getCount() + " results for hasPart(?X, ?Y)."); - - /* - * 6. We reset the reasoner to discard all inferences, and apply the Restricted - * Chase on the same set of rules and facts - */ - System.out.println(); - reasoner.resetReasoner(); - - /* - * 7. Materialise with the Restricted Chase. As the Restricted Chase is known to - * terminate for this set of rules and facts, we will not interrupt it. - */ - reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - reasoner.setReasoningTimeout(null); - final long restrictedChaseStartTime = System.currentTimeMillis(); - System.out.println("Starting Restricted Chase (a.k.a. Standard Chase) without any timeout ... "); - reasoner.reason(); - - /* The Restricted Chase terminates: */ - final long restrictedChaseDuration = System.currentTimeMillis() - restrictedChaseStartTime; - System.out.println("The Restricted Chase finished in " + restrictedChaseDuration + " ms."); - - /* - * See that the Restricted Chase generated a small number of facts, reusing - * individuals that satisfy existential restrictions. 
- */ - ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); - } - } - -} +package org.semanticweb.rulewerk.examples.core; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * This example shows non-termination of the Skolem Chase, versus termination of + * the Restricted Chase on the same set of rules and facts. Note that the + * Restricted Chase is the default reasoning algorithm, as it terminates in most + * cases and generates a smaller number of facts. + * + * @author Irina Dragoste + * + */ +public class SkolemVsRestrictedChaseTermination { + + public static void main(final String[] args) throws IOException, ParsingException { + + ExamplesUtils.configureLogging(); + + final String facts = ""// define some facts: + + "bicycle(bicycle1) ." // + + "hasPart(bicycle1, wheel1) ." // + + "wheel(wheel1) ." // + + "bicycle(bicycle2) ."; + + final String rules = "" + // every bicycle has some part that is a wheel: + + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." // + // every wheel is part of some bicycle: + + "isPartOf(?X, !Y), bicycle(!Y) :- wheel(?X) ." // + // hasPart and isPartOf are mutually inverse relations: + + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." // + + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; + + /* + * 1. Load facts into a knowledge base + */ + final KnowledgeBase kb = RuleParser.parse(facts); + + /* + * 2. Load the knowledge base into the reasoner + */ + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + + /* + * 3. Query the reasoner before applying rules for fact materialisation + */ + final PositiveLiteral queryHasPart = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); + + /* See that there is no fact HasPartIDB before reasoning. */ + System.out.println("Before reasoning is started, no inferrences have been computed yet."); + ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); + + /* + * 4. Load rules into the knowledge base + */ + RuleParser.parseInto(kb, rules); + /* + * 5. Materialise with the Skolem Chase. As the Skolem Chase is known not to + * terminate for this set of rules and facts, it is interrupted after one + * second. + */ + reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); + reasoner.setReasoningTimeout(1); + System.out.println("Starting Skolem Chase (a.k.a. semi-oblivious chase) with 1 second timeout ..."); + final boolean skolemChaseFinished = reasoner.reason(); + + /* Verify that the Skolem Chase did not terminate before timeout. 
*/ + System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? " + skolemChaseFinished); + /* + * See that the Skolem Chase generated a very large number of facts in 1 second, + * extensively introducing new unnamed individuals to satisfy existential + * restrictions. + */ + System.out.println("Before the timeout, the Skolem chase had produced " + + reasoner.countQueryAnswers(queryHasPart).getCount() + " results for hasPart(?X, ?Y)."); + + /* + * 6. We reset the reasoner to discard all inferences, and apply the Restricted + * Chase on the same set of rules and facts + */ + System.out.println(); + reasoner.resetReasoner(); + + /* + * 7. Materialise with the Restricted Chase. As the Restricted Chase is known to + * terminate for this set of rules and facts, we will not interrupt it. + */ + reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); + reasoner.setReasoningTimeout(null); + final long restrictedChaseStartTime = System.currentTimeMillis(); + System.out.println("Starting Restricted Chase (a.k.a. Standard Chase) without any timeout ... "); + reasoner.reason(); + + /* The Restricted Chase terminates: */ + final long restrictedChaseDuration = System.currentTimeMillis() - restrictedChaseStartTime; + System.out.println("The Restricted Chase finished in " + restrictedChaseDuration + " ms."); + + /* + * See that the Restricted Chase generated a small number of facts, reusing + * individuals that satisfy existential restrictions. + */ + ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); + } + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java index ccd12e5ec..646b65f5a 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java index 55e498784..6000043b8 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java index 9e7b7504e..774b10265 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java index 67be04091..dad6f26f1 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -1,141 +1,141 @@ -package org.semanticweb.rulewerk.examples.owlapi; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Set; - -import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.rulewerk.examples.ExamplesUtils; -import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; - -/** - * This example shows how vlog4j-owlapi library (class - * {@link OwlToRulesConverter}) can be used to transform an OWL ontology into - * vlog4j-core {@link Rule}s and {@link Fact}s. 
- * - * @author Irina Dragoste - * - */ -public class OwlOntologyToRulesAndFacts { - - public static void main(final String[] args) throws OWLOntologyCreationException, IOException { - - /* Bike ontology is loaded from a Bike file using OWL API */ - final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); - final OWLOntology ontology = ontologyManager - .loadOntologyFromOntologyDocument(new File(ExamplesUtils.INPUT_FOLDER + "owl/bike.owl")); - - /* - * vlog4j.owlapi.OwlToRulesConverter can be used to convert the OWL axiom in - * source ontology to target Rule and Atom objects - */ - final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); - owlToRulesConverter.addOntology(ontology); - - /* Print out the Rules extracted from bike ontology. */ - System.out.println("Rules extracted from Bike ontology:"); - final Set rules = owlToRulesConverter.getRules(); - for (final Rule rule : rules) { - System.out.println(" - rule: " + rule); - } - System.out.println(); - - /* Print out Facts extracted from bike ontology */ - System.out.println("Facts extracted from Bike ontology:"); - final Set facts = owlToRulesConverter.getFacts(); - for (final PositiveLiteral fact : facts) { - System.out.println(" - fact: " + fact); - } - System.out.println(); - - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(new ArrayList<>(owlToRulesConverter.getRules())); - kb.addStatements(owlToRulesConverter.getFacts()); - - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - /* - * Load rules and facts obtained from the ontology, and reason over loaded - * ontology with the default algorithm Restricted Chase - */ - System.out.println("Reasoning default algorithm: " + reasoner.getAlgorithm()); - reasoner.reason(); - - /* Query for the parts of bike constant "b2". */ - final Variable vx = Expressions.makeUniversalVariable("x"); - final Constant b2 = Expressions.makeAbstractConstant("http://www.bike-example.ontology#b2"); - - final PositiveLiteral b2HasPart = Expressions - .makePositiveLiteral("http://www.bike-example.ontology#hasPart", b2, vx); - System.out.println("Answers to query " + b2HasPart + " :"); - - /* - * See that an unnamed individual has been introduced to satisfy - * owl:someValuesFrom restriction: - * - * :Bike rdf:type owl:Class ; rdfs:subClassOf [ rdf:type owl:Restriction ; - * owl:onProperty :hasPart ; owl:someValuesFrom :Wheel ] . - */ - try (QueryResultIterator answers = reasoner.answerQuery(b2HasPart, true);) { - answers.forEachRemaining(answer -> { - final Term constantB2 = answer.getTerms().get(0); - final Term term = answer.getTerms().get(1); - System.out.println(" - " + constantB2 + " hasPart " + term); - System.out.println(" Term " + term + " is of type " + term.getType()); - }); - } - - final PositiveLiteral isPartOfB2 = Expressions - .makePositiveLiteral("http://www.bike-example.ontology#isPartOf", vx, b2); - - System.out.println("Answers to query " + isPartOfB2 + " :"); - /* - * See that the same unnamed individual is part of Bike b2, satisfying - * restriction :Wheel rdf:type owl:Class ; rdfs:subClassOf [ rdf:type - * owl:Restriction ; owl:onProperty :isPartOf ; owl:someValuesFrom :Bike ] . 
- */ - try (QueryResultIterator answers = reasoner.answerQuery(isPartOfB2, true);) { - answers.forEachRemaining(answer -> { - final Term term = answer.getTerms().get(0); - final Term constantB2 = answer.getTerms().get(1); - System.out.println(" - " + term + " isPartOf " + constantB2); - System.out.println(" Term " + term + " is of type " + term.getType()); - }); - } - - } - } -} +package org.semanticweb.rulewerk.examples.owlapi; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Set; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; + +/** + * This example shows how vlog4j-owlapi library (class + * {@link OwlToRulesConverter}) can be used to transform an OWL ontology into + * vlog4j-core {@link Rule}s and {@link Fact}s. + * + * @author Irina Dragoste + * + */ +public class OwlOntologyToRulesAndFacts { + + public static void main(final String[] args) throws OWLOntologyCreationException, IOException { + + /* Bike ontology is loaded from a Bike file using OWL API */ + final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); + final OWLOntology ontology = ontologyManager + .loadOntologyFromOntologyDocument(new File(ExamplesUtils.INPUT_FOLDER + "owl/bike.owl")); + + /* + * vlog4j.owlapi.OwlToRulesConverter can be used to convert the OWL axiom in + * source ontology to target Rule and Atom objects + */ + final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); + owlToRulesConverter.addOntology(ontology); + + /* Print out the Rules extracted from bike ontology. 
*/ + System.out.println("Rules extracted from Bike ontology:"); + final Set rules = owlToRulesConverter.getRules(); + for (final Rule rule : rules) { + System.out.println(" - rule: " + rule); + } + System.out.println(); + + /* Print out Facts extracted from bike ontology */ + System.out.println("Facts extracted from Bike ontology:"); + final Set facts = owlToRulesConverter.getFacts(); + for (final PositiveLiteral fact : facts) { + System.out.println(" - fact: " + fact); + } + System.out.println(); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(new ArrayList<>(owlToRulesConverter.getRules())); + kb.addStatements(owlToRulesConverter.getFacts()); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + /* + * Load rules and facts obtained from the ontology, and reason over loaded + * ontology with the default algorithm Restricted Chase + */ + System.out.println("Reasoning default algorithm: " + reasoner.getAlgorithm()); + reasoner.reason(); + + /* Query for the parts of bike constant "b2". */ + final Variable vx = Expressions.makeUniversalVariable("x"); + final Constant b2 = Expressions.makeAbstractConstant("http://www.bike-example.ontology#b2"); + + final PositiveLiteral b2HasPart = Expressions + .makePositiveLiteral("http://www.bike-example.ontology#hasPart", b2, vx); + System.out.println("Answers to query " + b2HasPart + " :"); + + /* + * See that an unnamed individual has been introduced to satisfy + * owl:someValuesFrom restriction: + * + * :Bike rdf:type owl:Class ; rdfs:subClassOf [ rdf:type owl:Restriction ; + * owl:onProperty :hasPart ; owl:someValuesFrom :Wheel ] . + */ + try (QueryResultIterator answers = reasoner.answerQuery(b2HasPart, true);) { + answers.forEachRemaining(answer -> { + final Term constantB2 = answer.getTerms().get(0); + final Term term = answer.getTerms().get(1); + System.out.println(" - " + constantB2 + " hasPart " + term); + System.out.println(" Term " + term + " is of type " + term.getType()); + }); + } + + final PositiveLiteral isPartOfB2 = Expressions + .makePositiveLiteral("http://www.bike-example.ontology#isPartOf", vx, b2); + + System.out.println("Answers to query " + isPartOfB2 + " :"); + /* + * See that the same unnamed individual is part of Bike b2, satisfying + * restriction :Wheel rdf:type owl:Class ; rdfs:subClassOf [ rdf:type + * owl:Restriction ; owl:onProperty :isPartOf ; owl:someValuesFrom :Bike ] . + */ + try (QueryResultIterator answers = reasoner.answerQuery(isPartOfB2, true);) { + answers.forEachRemaining(answer -> { + final Term term = answer.getTerms().get(0); + final Term constantB2 = answer.getTerms().get(1); + System.out.println(" - " + term + " isPartOf " + constantB2); + System.out.println(" Term " + term + " is of type " + term.getType()); + }); + } + + } + } +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java index d1aaa22e3..251f89777 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -1,184 +1,184 @@ -package org.semanticweb.rulewerk.examples.rdf; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.Set; - -import org.openrdf.model.Model; -import org.openrdf.model.impl.LinkedHashModel; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.openrdf.rio.RDFParser; -import org.openrdf.rio.Rio; -import org.openrdf.rio.helpers.StatementCollector; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.rulewerk.examples.ExamplesUtils; -import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.rdf.RdfModelConverter; - -/** - * This example shows how vlog4j-rdf library's utility class - * {@link RdfModelConverter} can be used to convert RDF {@link Model}s from - * various types of RDF resources to vlog4j-core {@code Atom} sets. - * - * @author Irina Dragoste - * - */ -public class AddDataFromRdfModel { - - public static void main(final String[] args) - throws IOException, RDFParseException, RDFHandlerException, URISyntaxException { - - ExamplesUtils.configureLogging(); - - /* - * Local file containing metadata of publications from ISWC'16 conference, in - * RDF/XML format. - */ - final File rdfXMLResourceFile = new File(ExamplesUtils.INPUT_FOLDER + "rdf/iswc-2016-complete-alignments.rdf"); - final FileInputStream inputStreamISWC2016 = new FileInputStream(rdfXMLResourceFile); - /* An RDF Model is obtained from parsing the RDF/XML resource. */ - final Model rdfModelISWC2016 = parseRdfResource(inputStreamISWC2016, rdfXMLResourceFile.toURI(), - RDFFormat.RDFXML); - - /* - * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having - * the ternary predicate "TRIPLE". - */ - final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2016); - System.out.println("Example triple fact from iswc-2016 dataset:"); - System.out.println(" - " + tripleFactsISWC2016.iterator().next()); - - /* - * URL of online resource containing metadata of publications from ISWC'17 - * conference, in TURTLE format. - */ - final URL turtleResourceURL = new URL( - "http://www.scholarlydata.org/dumps/conferences/alignments/iswc-2017-complete-alignments.ttl"); - final InputStream inputStreamISWC2017 = turtleResourceURL.openStream(); - /* An RDF Model is obtained from parsing the TURTLE resource. 
*/ - final Model rdfModelISWC2017 = parseRdfResource(inputStreamISWC2017, turtleResourceURL.toURI(), - RDFFormat.TURTLE); - - /* - * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having - * the ternary predicate "TRIPLE". - */ - final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2017); - System.out.println("Example triple fact from iswc-2017 dataset:"); - System.out.println(" - " + tripleFactsISWC2017.iterator().next()); - - /** - * We wish to combine triples about a person's affiliation, an affiliation's - * organization and an organization's name, to find a person's organization - * name. - */ - - /* Predicate names of the triples found in both RDF files. */ - final Variable varPerson = Expressions.makeUniversalVariable("person"); - final Predicate predicateHasOrganizationName = Expressions.makePredicate("hasOrganizationName", 2); - - /* - * Rule that retrieves pairs of persons and their organization name: - */ - final String rules = "%%%% We specify the rules syntactically for convenience %%%\n" - + "@prefix cnf: ." - + "hasOrganizationName(?Person, ?OrgName) :- " - + " TRIPLE(?Person, cnf:hasAffiliation, ?Aff), TRIPLE(?Aff, cnf:withOrganisation, ?Org)," - + " TRIPLE(?Org, cnf:name, ?OrgName) ."; - KnowledgeBase kb; - try { - kb = RuleParser.parse(rules); - } catch (final ParsingException e) { - System.out.println("Failed to parse rules: " + e.getMessage()); - return; - } - kb.addStatements(tripleFactsISWC2016); - kb.addStatements(tripleFactsISWC2017); - - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - - /* We query for persons whose organization name is "TU Dresden" . */ - final Constant constantTuDresden = Expressions.makeDatatypeConstant("TU Dresden", - "http://www.w3.org/2001/XMLSchema#string"); - /* hasOrganizationName(?person, "TU Dresden") */ - final PositiveLiteral queryTUDresdenParticipantsAtISWC = Expressions - .makePositiveLiteral(predicateHasOrganizationName, varPerson, constantTuDresden); - - System.out.println("\nParticipants at ISWC'16 and '17 from Organization 'TU Dresden':"); - System.out.println("(Answers to query " + queryTUDresdenParticipantsAtISWC + ")\n"); - try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryTUDresdenParticipantsAtISWC, - false)) { - queryResultIterator.forEachRemaining(answer -> System.out - .println(" - " + answer.getTerms().get(0) + ", organization " + answer.getTerms().get(1))); - } - - } - - } - - /** - * Parses the data from the supplied InputStream, using the supplied baseURI to - * resolve any relative URI references. - * - * @param inputStream The content to be parsed, expected to be in the given - * {@code rdfFormat}. - * @param baseURI The URI associated with the data in the InputStream. - * @param rdfFormat The expected RDFformat of the inputStream resource that is - * to be parsed. - * @return A Model containing the RDF triples. Blanks have unique ids across - * different models. - * @throws IOException If an I/O error occurred while data was read from - * the InputStream. - * @throws RDFParseException If the parser has found an unrecoverable parse - * error. - * @throws RDFHandlerException If the configured statement handler has - * encountered an unrecoverable error. 
- */ - private static Model parseRdfResource(final InputStream inputStream, final URI baseURI, final RDFFormat rdfFormat) - throws IOException, RDFParseException, RDFHandlerException { - final Model model = new LinkedHashModel(); - final RDFParser rdfParser = Rio.createParser(rdfFormat); - rdfParser.setRDFHandler(new StatementCollector(model)); - rdfParser.parse(inputStream, baseURI.toString()); - - return model; - } - -} +package org.semanticweb.rulewerk.examples.rdf; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Set; + +import org.openrdf.model.Model; +import org.openrdf.model.impl.LinkedHashModel; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.openrdf.rio.RDFParser; +import org.openrdf.rio.Rio; +import org.openrdf.rio.helpers.StatementCollector; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.rdf.RdfModelConverter; + +/** + * This example shows how vlog4j-rdf library's utility class + * {@link RdfModelConverter} can be used to convert RDF {@link Model}s from + * various types of RDF resources to vlog4j-core {@code Atom} sets. + * + * @author Irina Dragoste + * + */ +public class AddDataFromRdfModel { + + public static void main(final String[] args) + throws IOException, RDFParseException, RDFHandlerException, URISyntaxException { + + ExamplesUtils.configureLogging(); + + /* + * Local file containing metadata of publications from ISWC'16 conference, in + * RDF/XML format. + */ + final File rdfXMLResourceFile = new File(ExamplesUtils.INPUT_FOLDER + "rdf/iswc-2016-complete-alignments.rdf"); + final FileInputStream inputStreamISWC2016 = new FileInputStream(rdfXMLResourceFile); + /* An RDF Model is obtained from parsing the RDF/XML resource. 
*/ + final Model rdfModelISWC2016 = parseRdfResource(inputStreamISWC2016, rdfXMLResourceFile.toURI(), + RDFFormat.RDFXML); + + /* + * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having + * the ternary predicate "TRIPLE". + */ + final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2016); + System.out.println("Example triple fact from iswc-2016 dataset:"); + System.out.println(" - " + tripleFactsISWC2016.iterator().next()); + + /* + * URL of online resource containing metadata of publications from ISWC'17 + * conference, in TURTLE format. + */ + final URL turtleResourceURL = new URL( + "http://www.scholarlydata.org/dumps/conferences/alignments/iswc-2017-complete-alignments.ttl"); + final InputStream inputStreamISWC2017 = turtleResourceURL.openStream(); + /* An RDF Model is obtained from parsing the TURTLE resource. */ + final Model rdfModelISWC2017 = parseRdfResource(inputStreamISWC2017, turtleResourceURL.toURI(), + RDFFormat.TURTLE); + + /* + * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having + * the ternary predicate "TRIPLE". + */ + final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2017); + System.out.println("Example triple fact from iswc-2017 dataset:"); + System.out.println(" - " + tripleFactsISWC2017.iterator().next()); + + /** + * We wish to combine triples about a person's affiliation, an affiliation's + * organization and an organization's name, to find a person's organization + * name. + */ + + /* Predicate names of the triples found in both RDF files. */ + final Variable varPerson = Expressions.makeUniversalVariable("person"); + final Predicate predicateHasOrganizationName = Expressions.makePredicate("hasOrganizationName", 2); + + /* + * Rule that retrieves pairs of persons and their organization name: + */ + final String rules = "%%%% We specify the rules syntactically for convenience %%%\n" + + "@prefix cnf: ." + + "hasOrganizationName(?Person, ?OrgName) :- " + + " TRIPLE(?Person, cnf:hasAffiliation, ?Aff), TRIPLE(?Aff, cnf:withOrganisation, ?Org)," + + " TRIPLE(?Org, cnf:name, ?OrgName) ."; + KnowledgeBase kb; + try { + kb = RuleParser.parse(rules); + } catch (final ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } + kb.addStatements(tripleFactsISWC2016); + kb.addStatements(tripleFactsISWC2017); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + + /* We query for persons whose organization name is "TU Dresden" . */ + final Constant constantTuDresden = Expressions.makeDatatypeConstant("TU Dresden", + "http://www.w3.org/2001/XMLSchema#string"); + /* hasOrganizationName(?person, "TU Dresden") */ + final PositiveLiteral queryTUDresdenParticipantsAtISWC = Expressions + .makePositiveLiteral(predicateHasOrganizationName, varPerson, constantTuDresden); + + System.out.println("\nParticipants at ISWC'16 and '17 from Organization 'TU Dresden':"); + System.out.println("(Answers to query " + queryTUDresdenParticipantsAtISWC + ")\n"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryTUDresdenParticipantsAtISWC, + false)) { + queryResultIterator.forEachRemaining(answer -> System.out + .println(" - " + answer.getTerms().get(0) + ", organization " + answer.getTerms().get(1))); + } + + } + + } + + /** + * Parses the data from the supplied InputStream, using the supplied baseURI to + * resolve any relative URI references. 
+ * + * @param inputStream The content to be parsed, expected to be in the given + * {@code rdfFormat}. + * @param baseURI The URI associated with the data in the InputStream. + * @param rdfFormat The expected RDFformat of the inputStream resource that is + * to be parsed. + * @return A Model containing the RDF triples. Blanks have unique ids across + * different models. + * @throws IOException If an I/O error occurred while data was read from + * the InputStream. + * @throws RDFParseException If the parser has found an unrecoverable parse + * error. + * @throws RDFHandlerException If the configured statement handler has + * encountered an unrecoverable error. + */ + private static Model parseRdfResource(final InputStream inputStream, final URI baseURI, final RDFFormat rdfFormat) + throws IOException, RDFParseException, RDFHandlerException { + final Model model = new LinkedHashModel(); + final RDFParser rdfParser = Rio.createParser(rdfFormat); + rdfParser.setRDFHandler(new StatementCollector(model)); + rdfParser.parse(inputStream, baseURI.toString()); + + return model; + } + +} diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java index 773bed754..ada714cbb 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java @@ -3,9 +3,9 @@ /*- * #%L - * VLog4J Graal Import Components + * Rulewerk Graal Import Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java index e02365c20..0c077263e 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4J Graal Import Components + * Rulewerk Graal Import Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java index 3a03c5993..f23cb6042 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4J Graal Import Components + * Rulewerk Graal Import Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
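The RDF pipeline above can likewise be summarised in a few lines: parse an RDF document into an openrdf Model, convert it to facts over the ternary TRIPLE predicate with RdfModelConverter, add rules over TRIPLE, and query. The sketch below assumes a local Turtle file data.ttl and an illustrative knows rule; the parsing steps mirror parseRdfResource above, and all converter, parser, and reasoner calls appear in AddDataFromRdfModel.

```
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.openrdf.model.Model;
import org.openrdf.model.impl.LinkedHashModel;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.StatementCollector;
import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner;
import org.semanticweb.rulewerk.parser.ParsingException;
import org.semanticweb.rulewerk.parser.RuleParser;
import org.semanticweb.rulewerk.rdf.RdfModelConverter;

public class RdfTriplesSketch {

    public static void main(final String[] args)
            throws IOException, RDFParseException, RDFHandlerException, ParsingException {
        // Parse a local Turtle file into an openrdf Model (the file name is a placeholder).
        final File file = new File("data.ttl");
        final Model model = new LinkedHashModel();
        final RDFParser parser = Rio.createParser(RDFFormat.TURTLE);
        parser.setRDFHandler(new StatementCollector(model));
        try (InputStream in = new FileInputStream(file)) {
            parser.parse(in, file.toURI().toString());
        }

        // A rule over the TRIPLE predicate (the property IRI is a placeholder),
        // plus the converted triples as facts.
        final KnowledgeBase kb = RuleParser
                .parse("knows(?X, ?Y) :- TRIPLE(?X, <http://example.org/knows>, ?Y) .");
        kb.addStatements(RdfModelConverter.rdfModelToFacts(model));

        try (VLogReasoner reasoner = new VLogReasoner(kb)) {
            reasoner.reason();
            final PositiveLiteral query = RuleParser.parsePositiveLiteral("knows(?X, ?Y)");
            try (QueryResultIterator answers = reasoner.answerQuery(query, false)) {
                answers.forEachRemaining(answer -> System.out.println(answer.getTerms()));
            }
        }
    }
}
```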
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java index 3e81a6909..e522de86d 100644 --- a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java +++ b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4J Graal Import Components + * Rulewerk Graal Import Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java index fdee7057b..7c8ab043c 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java index d77a95389..11818553f 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java index 4a958d114..2857286c6 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java index f16abe0f0..6564a0a8c 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -11,16 +11,16 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java index 254da4dcd..4f1841ebc 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index 2a6f7ea05..aa02ca07a 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -11,9 +11,9 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java index e5386caa4..4273bae87 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java index 6ed6fee3a..d561b779c 100644 --- a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java +++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java index a6cd79f27..0bf3ea7be 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java index bf89afe17..e17ae1d1e 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2020 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java index 12ad24f5e..bc94fc7ba 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java index f34bbc8c8..cf45c534a 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java index 4d1b77764..c31270d35 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index fca86b07b..01c9fc73c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index 4319a79ab..72e7d654a 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 22b268165..ee2a687f1 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java index f98e5639f..abca75072 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index ca28f7ea5..5ffdcf281 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java index eca5940b3..338b024e8 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java index d5833b1c6..b27f52d21 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 1bda9a0a2..e91632ac9 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2020 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index 532219593..337475363 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index c0130ddad..1de8df9f5 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 5c01d5b4c..f61a80218 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java index ab7e498ad..9a615bf1b 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java @@ -4,16 +4,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java index 7b14d9094..2ce1af622 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java index 1e2b9adb6..ef0a4eb6c 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java index aa166ede6..910e9375b 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java index 4f22a9a3a..cae03b2c7 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java index 89e5cd847..99c4e513e 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java index 0e46aaf97..639b30c01 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index c69fe217d..21919dd91 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java index e8185268d..dee7199c1 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index e0a4b38f2..f3e030e02 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java index 037e2d666..0ba85b209 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index e7473f587..e5547b8dd 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j RDF Support + * Rulewerk RDF Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java index 59b0ac934..6ead51c2f 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j RDF Support + * Rulewerk RDF Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java index ca400540f..fa689c4ad 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j RDF Support + * Rulewerk RDF Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java index c0f126e15..052ab1f5a 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j RDF Support + * Rulewerk RDF Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java index fc24fcaf5..6297a8968 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j RDF Support + * Rulewerk RDF Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. From dadfc5210dfba5d699600fe65b1f7e5d4f2d7d30 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 15:28:57 +0100 Subject: [PATCH 0568/1003] Rename all references of VLog4j --- .../rulewerk/client/picocli/ClientUtils.java | 8 +- .../client/picocli/PrintQueryResults.java | 12 +-- ...{VLog4jClient.java => RulewerkClient.java} | 16 +-- ...ze.java => RulewerkClientMaterialize.java} | 10 +- .../rulewerk/client/picocli/SaveModel.java | 8 +- .../client/picocli/SaveQueryResults.java | 8 +- .../IncompatiblePredicateArityException.java | 8 +- .../PrefixDeclarationException.java | 6 +- .../exceptions/ReasonerStateException.java | 10 +- ...jException.java => RulewerkException.java} | 16 +-- ...ion.java => RulewerkRuntimeException.java} | 14 +-- .../model/implementation/Expressions.java | 25 +++-- .../MergingPrefixDeclarationRegistry.java | 6 +- .../core/model/implementation/Serializer.java | 40 +++---- .../rulewerk/core/reasoner/KnowledgeBase.java | 14 +-- .../implementation/TermToVLogConverter.java | 34 +++--- .../reasoner/implementation/VLogReasoner.java | 6 +- .../MergingPrefixDeclarationRegistryTest.java | 8 +- .../VLogToModelConverterTest.java | 24 ++--- rulewerk-examples/README.md | 12 +-- .../examples/SimpleReasoningExample.java | 8 +- .../examples/graal/AddDataFromDlgpFile.java | 22 ++-- .../examples/graal/AddDataFromGraal.java | 20 ++-- .../examples/graal/DoidExampleGraal.java | 8 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 14 +-- .../examples/rdf/AddDataFromRdfModel.java | 16 +-- .../rulewerk/graal/GraalConvertException.java | 12 ++- ...ava => GraalToRulewerkModelConverter.java} | 60 +++++------ ...=> GraalToRulewerkModelConverterTest.java} | 100 +++++++++--------- .../rulewerk/parser/ParserConfiguration.java | 6 +- .../rulewerk/parser/ParsingException.java | 8 +- .../rulewerk/parser/javacc/JavaCCParser.jj | 4 +- 32 files changed, 282 insertions(+), 281 deletions(-) rename rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/{VLog4jClient.java => RulewerkClient.java} (70%) rename rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/{VLog4jClientMaterialize.java => RulewerkClientMaterialize.java} (96%) rename rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/{VLog4jException.java => RulewerkException.java} (72%) rename rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/{VLog4jRuntimeException.java => RulewerkRuntimeException.java} (78%) rename 
rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/{GraalToVLog4JModelConverter.java => GraalToRulewerkModelConverter.java} (87%) rename rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/{GraalToVLog4JModelConverterTest.java => GraalToRulewerkModelConverterTest.java} (69%) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java index edaf61f89..aca4cd136 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,7 +31,7 @@ import org.semanticweb.rulewerk.core.reasoner.Reasoner; /** - * Utility class for interacting with the vlog4j client. + * Utility class for interacting with the Rulewerk client. * * @author dragoste * @@ -96,7 +96,7 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R * * @param queryAtom query to be answered * @param reasoner reasoner to query on - * + * * @return number of answers to the given query */ public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Reasoner reasoner) { diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java index 4f44b7f4c..a36f0d85d 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -33,21 +33,21 @@ public class PrintQueryResults { static final String configurationErrorMessage = "Configuration Error: @code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. Set only one to true."; /** - * If true, Vlog4jClient will print the size of the query result. Mutually + * If true, RulewerkClient will print the size of the query result. Mutually * exclusive with {@code --print-complete-query-result} * * @default true */ - @Option(names = "--print-query-result-size", description = "Boolean. If true, Vlog4jClient will print the size of the query result. True by default.") + @Option(names = "--print-query-result-size", description = "Boolean. If true, RulewerkClient will print the size of the query result. True by default.") private boolean sizeOnly = true; /** - * If true, Vlog4jClient will print the query result in stdout. 
Mutually + * If true, RulewerkClient will print the query result in stdout. Mutually * exclusive with {@code --print-query-result-size} * * @default false */ - @Option(names = "--print-complete-query-result", description = "Boolean. If true, Vlog4jClient will print the query result in stdout. False by default.") + @Option(names = "--print-complete-query-result", description = "Boolean. If true, RulewerkClient will print the query result in stdout. False by default.") private boolean complete = false; public PrintQueryResults() { diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java similarity index 70% rename from rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java index c2db5d180..54de47a9b 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,22 +25,22 @@ import picocli.CommandLine.Command; /** - * Stand alone client for VLog4j. + * Stand alone client for Rulewerk. * * @author Larry Gonzalez * */ -@Command(name = "java -jar VLog4jClient.jar", description = "VLog4jClient: A command line client of VLog4j.", subcommands = { - VLog4jClientMaterialize.class }) -public class VLog4jClient implements Runnable { +@Command(name = "java -jar RulewerkClient.jar", description = "RulewerkClient: A command line client for Rulewerk.", subcommands = { + RulewerkClientMaterialize.class }) +public class RulewerkClient implements Runnable { public static void main(String[] args) { - CommandLine commandline = new CommandLine(new VLog4jClient()); + CommandLine commandline = new CommandLine(new RulewerkClient()); commandline.execute(args); } @Override public void run() { - (new CommandLine(new VLog4jClient())).usage(System.out); + (new CommandLine(new RulewerkClient())).usage(System.out); } } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java similarity index 96% rename from rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java index 961c3a8ed..91a4d0cda 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -45,12 +45,12 @@ * */ @Command(name = "materialize", description = "Execute the chase and store the literal's extensions") -public class VLog4jClientMaterialize implements Runnable { +public class RulewerkClientMaterialize implements Runnable { private final KnowledgeBase kb = new KnowledgeBase(); private final List queries = new ArrayList<>(); - @Option(names = "--rule-file", description = "Rule file(s) in {@link https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar} syntax", required = true) + @Option(names = "--rule-file", description = "Rule file(s) in {@link https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar} syntax", required = true) private final List ruleFiles = new ArrayList<>(); // TODO @@ -70,7 +70,7 @@ public class VLog4jClientMaterialize implements Runnable { @Option(names = "--timeout", description = "Timeout in seconds. Infinite by default", required = false) private int timeout = 0; - @Option(names = "--query", description = "Positive not-ground Literals to query after materialization in rls syntax. Vlog4jClient will print the size of its extension", required = true) + @Option(names = "--query", description = "Positive not-ground Literals to query after materialization in rls syntax. RulewerkClient will print the size of its extension", required = true) private List queryStrings = new ArrayList<>(); @ArgGroup(exclusive = false) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java index 354fa52f4..bcd23d052 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -38,11 +38,11 @@ public class SaveModel { static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. Please check the path."; /** - * If true, Vlog4jClient will save the model in {@code --output-model-directory} + * If true, RulewerkClient will save the model in {@code --output-model-directory} * * @default false */ - @Option(names = "--save-model", description = "Boolean. If true, Vlog4jClient will save the model into --output-model-directory. False by default.") + @Option(names = "--save-model", description = "Boolean. If true, RulewerkClient will save the model into --output-model-directory. 
False by default.") private boolean saveModel = false; /** diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java index 8d43da5fd..9ca9bd4fb 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -37,12 +37,12 @@ public class SaveQueryResults { static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. Please check the path."; /** - * If true, Vlog4jClient will save the query result in + * If true, RulewerkClient will save the query result in * {@code --output-query-result-directory} * * @default false */ - @Option(names = "--save-query-results", description = "Boolean. If true, Vlog4jClient will save the query result into --output-query-result-directory. False by default.") + @Option(names = "--save-query-results", description = "Boolean. If true, RulewerkClient will save the query result into --output-query-result-directory. False by default.") private boolean saveResults = false; /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java index 67f033a81..28e22ce99 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,11 +28,11 @@ /** * Expression thrown when attempting to load facts for a {@link Predicate} from * a {@link DataSource} that does not contain data of the specified arity. 
- * + * * @author Irina Dragoste * */ -public class IncompatiblePredicateArityException extends VLog4jRuntimeException { +public class IncompatiblePredicateArityException extends RulewerkRuntimeException { private static final long serialVersionUID = -5081219042292721026L; private static final String messagePattern = "Predicate arity [{0}] of predicate [{1}] incompatible with arity [{2}] of the data source [{3}]!"; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java index b81bfaffe..eacafd6de 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,7 +20,7 @@ * #L% */ -public class PrefixDeclarationException extends VLog4jException { +public class PrefixDeclarationException extends RulewerkException { private static final long serialVersionUID = 787997047134745982L; public PrefixDeclarationException(String errorMessage) { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java index 813035df3..af961ffda 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -27,11 +27,11 @@ /** * Thrown when an operation that is invalid in current reasoner state is * attempted. - * + * * @author Irina Dragoste * */ -public class ReasonerStateException extends VLog4jRuntimeException { +public class ReasonerStateException extends RulewerkRuntimeException { /** * generated serial version UID @@ -42,7 +42,7 @@ public class ReasonerStateException extends VLog4jRuntimeException { /** * Creates an exception with a logging message for current reasoner state. - * + * * @param state * the current Reasoner state. 
* @param message diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java similarity index 72% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java index d8c046a19..ea0eaca0e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,30 +21,30 @@ */ /** - * Top-level checked exception for VLog4j system. + * Top-level checked exception for Rulewerk system. * @author Irina Dragoste * */ -public class VLog4jException extends Exception { +public class RulewerkException extends Exception { /** * generated serial version UID */ private static final long serialVersionUID = 8305375071519734590L; - public VLog4jException(Throwable cause) { + public RulewerkException(Throwable cause) { super(cause); } - public VLog4jException(String message, Throwable cause) { + public RulewerkException(String message, Throwable cause) { super(message, cause); } - public VLog4jException(String message) { + public RulewerkException(String message) { super(message); } - public VLog4jException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + public RulewerkException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java similarity index 78% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java index 65a52d9c6..609e0f882 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,23 +21,23 @@ */ /** - * Superclass of unchecked exceptions generated by VLog4j. - * + * Superclass of unchecked exceptions generated by Rulewerk. 
+ * * @author Markus Kroetzsch * */ -public class VLog4jRuntimeException extends RuntimeException { +public class RulewerkRuntimeException extends RuntimeException { /** * Generated serial version ID. */ private static final long serialVersionUID = -6574826887294416900L; - public VLog4jRuntimeException(String message, Throwable cause) { + public RulewerkRuntimeException(String message, Throwable cause) { super(message, cause); } - public VLog4jRuntimeException(String message) { + public RulewerkRuntimeException(String message) { super(message); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java index 24998c007..74529fb51 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -1,7 +1,5 @@ package org.semanticweb.rulewerk.core.model.implementation; -import java.util.ArrayList; - /*- * #%L * Rulewerk Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +20,7 @@ * #L% */ +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -41,8 +40,8 @@ /** * This utilities class provides static methods for creating terms and formulas - * in vlog4j. - * + * in Rulewerk. + * * @author Markus Krötzsch * */ @@ -57,7 +56,7 @@ private Expressions() { /** * Creates a {@link UniversalVariable}. - * + * * @param name name of the variable * @return a {@link UniversalVariable} corresponding to the input. */ @@ -67,7 +66,7 @@ public static UniversalVariable makeUniversalVariable(String name) { /** * Creates an {@link ExistentialVariable}. - * + * * @param name name of the variable * @return a {@link ExistentialVariable} corresponding to the input. */ @@ -77,7 +76,7 @@ public static ExistentialVariable makeExistentialVariable(String name) { /** * Creates an {@link AbstractConstant}. - * + * * @param name name of the constant * @return an {@link AbstractConstant} corresponding to the input. */ @@ -87,7 +86,7 @@ public static AbstractConstant makeAbstractConstant(String name) { /** * Creates a {@link DatatypeConstant} from the given input. - * + * * @param lexicalValue the lexical representation of the data value * @param datatypeIri the full absolute IRI of the datatype of this literal * @return a {@link DatatypeConstant} corresponding to the input. @@ -98,7 +97,7 @@ public static DatatypeConstant makeDatatypeConstant(String lexicalValue, String /** * Creates a {@link LanguageStringConstant} from the given input. - * + * * @param string the string value of the constant * @param languageTag the BCP 47 language tag of the constant; should be in * lower case @@ -110,7 +109,7 @@ public static LanguageStringConstant makeLanguageStringConstant(String string, S /** * Creates a {@link Predicate}. - * + * * @param name non-blank predicate name * @param arity predicate arity, strictly greater than 0 * @return a {@link Predicate} corresponding to the input. 
@@ -137,7 +136,7 @@ public static Fact makeFact(final String predicateName, final List terms) /** * Creates a {@code Fact}. - * + * * @param predicateName on-blank {@link Predicate} name * @param terms non-empty, non-null array of non-null terms * @return a {@link Fact} with given {@code terms} and {@link Predicate} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index 77aa2038b..8b6ebe16b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -46,7 +46,7 @@ final public class MergingPrefixDeclarationRegistry extends AbstractPrefixDeclar /** * Prefix string to use for generated prefix name */ - private static final String GENERATED_PREFIX_PREFIX_STRING = "vlog4j_generated_"; + private static final String GENERATED_PREFIX_PREFIX_STRING = "rulewerk_generated_"; public MergingPrefixDeclarationRegistry() { super(); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 9df2cde79..bbccf9e1b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -16,9 +16,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -97,7 +97,7 @@ private Serializer() { /** * Creates a String representation of a given {@link Rule}. * - * @see Rule syntax . + * @see Rule syntax . * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * @@ -109,7 +109,7 @@ public static String getString(final Rule rule) { /** * Creates a String representation of a given {@link Conjunction}. * - * @see Rule syntax . + * @see Rule syntax . * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. */ @@ -130,7 +130,7 @@ public static String getString(final Conjunction conjunction) /** * Creates a String representation of a given {@link Literal}. * - * @see Rule syntax . + * @see Rule syntax . * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. 
*/ @@ -146,7 +146,7 @@ public static String getString(final Literal literal) { /** * Creates a String representation of a given {@link Fact}. * - * @see Rule syntax . + * @see Rule syntax . * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ @@ -157,7 +157,7 @@ public static String getFactString(final Fact fact) { /** * Creates a String representation of a given {@link AbstractConstant}. * - * @see Rule syntax . + * @see Rule syntax . * @param constant a {@link AbstractConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -170,7 +170,7 @@ public static String getString(final AbstractConstant constant, FunctionRule syntax . + * @see Rule syntax . * @param constant a {@link AbstractConstant} * @return String representation corresponding to a given * {@link AbstractConstant}. @@ -183,7 +183,7 @@ public static String getString(final AbstractConstant constant) { * Creates a String representation corresponding to the name of a given * {@link LanguageStringConstant}. * - * @see Rule syntax . + * @see Rule syntax . * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. @@ -208,7 +208,7 @@ public static String getConstantName(final LanguageStringConstant languageString *
        • {@code "test"^^} results in {@code "test"^^}, modulo transformation of the datatype IRI.
        • *
        * - * @see Rule syntax . + * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -253,7 +253,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule syntax . + * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -268,7 +268,7 @@ public static String getConstantName(final DatatypeConstant datatypeConstant, * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule syntax . + * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -281,7 +281,7 @@ public static String getConstantName(final DatatypeConstant datatypeConstant) { /** * Creates a String representation of a given {@link ExistentialVariable}. * - * @see Rule syntax . + * @see Rule syntax . * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. @@ -293,7 +293,7 @@ public static String getString(final ExistentialVariable existentialVariable) { /** * Creates a String representation of a given {@link UniversalVariable}. * - * @see Rule syntax . + * @see Rule syntax . * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. @@ -305,7 +305,7 @@ public static String getString(final UniversalVariable universalVariable) { /** * Creates a String representation of a given {@link NamedNull}. * - * @see Rule syntax . + * @see Rule syntax . * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ @@ -316,7 +316,7 @@ public static String getString(final NamedNull namedNull) { /** * Creates a String representation of a given {@link Predicate}. * - * @see Rule syntax . + * @see Rule syntax . * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ @@ -327,7 +327,7 @@ public static String getString(final Predicate predicate) { /** * Creates a String representation of a given {@link DataSourceDeclaration}. * - * @see Rule syntax . + * @see Rule syntax . * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. @@ -340,7 +340,7 @@ public static String getString(final DataSourceDeclaration dataSourceDeclaration /** * Creates a String representation of a given {@link CsvFileDataSource}. * - * @see Rule syntax .. + * @see Rule syntax .. * * @param csvFileDataSource * @return String representation corresponding to a given @@ -353,7 +353,7 @@ public static String getString(final CsvFileDataSource csvFileDataSource) { /** * Creates a String representation of a given {@link RdfFileDataSource}. * - * @see Rule syntax .. + * @see Rule syntax .. * * * @param rdfFileDataSource @@ -368,7 +368,7 @@ public static String getString(final RdfFileDataSource rdfFileDataSource) { * Creates a String representation of a given * {@link SparqlQueryResultDataSource}. * - * @see Rule syntax . 
+ * @see Rule syntax . * * * @param dataSource diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 5ebe5560c..e125d3267 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -38,7 +38,7 @@ import org.apache.commons.lang3.Validate; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; -import org.semanticweb.rulewerk.core.exceptions.VLog4jException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; @@ -451,12 +451,12 @@ Map> getFactsByPredicate() { * This is essentially * {@link org.semanticweb.rulewerk.parser.RuleParser#parseInto}, but we need to * avoid a circular dependency here -- this is also why we throw - * {@link VLog4jException} instead of + * {@link RulewerkException} instead of * {@link org.semanticweb.rulewerk.parser.ParsingException}. */ @FunctionalInterface public interface AdditionalInputParser { - void parseInto(InputStream stream, KnowledgeBase kb) throws IOException, VLog4jException; + void parseInto(InputStream stream, KnowledgeBase kb) throws IOException, RulewerkException; } /** @@ -469,10 +469,10 @@ public interface AdditionalInputParser { * @throws IOException when reading {@code file} fails * @throws IllegalArgumentException when {@code file} is null or has already * been imported - * @throws VLog4jException when parseFunction throws VLog4jException + * @throws RulewerkException when parseFunction throws RulewerkException */ public void importRulesFile(File file, AdditionalInputParser parseFunction) - throws VLog4jException, IOException, IllegalArgumentException { + throws RulewerkException, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index 345bd8aaf..5fe7d568d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -33,7 +33,7 @@ /** * A visitor that converts {@link Term}s of different types to corresponding * internal VLog model {@link karmaresearch.vlog.Term}s. - * + * * @author Irina Dragoste * */ @@ -69,17 +69,17 @@ public karmaresearch.vlog.Term visit(LanguageStringConstant term) { /** * Converts the given constant to the name of a constant in VLog. - * + * * @param constant * @return VLog constant string */ public static String getVLogNameForConstant(Constant constant) { if (constant.getType() == TermType.ABSTRACT_CONSTANT) { - String vLog4jConstantName = constant.getName(); - if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > - return "<" + vLog4jConstantName + ">"; + String rulewerkConstantName = constant.getName(); + if (rulewerkConstantName.contains(":")) { // enclose IRIs with < > + return "<" + rulewerkConstantName + ">"; } else { // keep relative IRIs unchanged - return vLog4jConstantName; + return rulewerkConstantName; } } else { // datatype literal return constant.getName(); @@ -87,19 +87,19 @@ public static String getVLogNameForConstant(Constant constant) { } /** - * Converts the string representation of a constant in VLog4j directly to the + * Converts the string representation of a constant in Rulewerk directly to the * name of a constant in VLog, without parsing it into a {@link Constant} first. - * - * @param vLog4jConstantName + * + * @param rulewerkConstantName * @return VLog constant string */ - public static String getVLogNameForConstantName(String vLog4jConstantName) { - if (vLog4jConstantName.startsWith("\"")) { // keep datatype literal strings unchanged - return vLog4jConstantName; - } else if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > - return "<" + vLog4jConstantName + ">"; + public static String getVLogNameForConstantName(String rulewerkConstantName) { + if (rulewerkConstantName.startsWith("\"")) { // keep datatype literal strings unchanged + return rulewerkConstantName; + } else if (rulewerkConstantName.contains(":")) { // enclose IRIs with < > + return "<" + rulewerkConstantName + ">"; } else { // keep relative IRIs unchanged - return vLog4jConstantName; + return rulewerkConstantName; } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java index 66f88d4bb..bfc8ab05a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java @@ -55,9 +55,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -642,7 +642,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { try { load(); - } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/rulewerk/issues/128 throw new RuntimeException(e); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index d6889aae4..131d5a712 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -103,7 +103,7 @@ public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationExce @Test public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDeclarationException { - String prefix = "vlog4j_generated_"; + String prefix = "rulewerk_generated_"; prefixDeclarations.setPrefixIri(prefix + "0:", BASE + "generated/"); prefixDeclarations.setPrefixIri("eg:", BASE); prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); @@ -127,7 +127,7 @@ public void mergePrefixDeclarations_conflictingPrefixName_renamesConflictingPref prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); assertEquals(BASE, this.prefixDeclarations.getPrefixIri("eg:")); - assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("vlog4j_generated_0:")); + assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("rulewerk_generated_0:")); } @Test diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java index 0ee0ddcce..0f67a6fa5 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -35,44 +35,44 @@ public class VLogToModelConverterTest { @Test public void testAbstractConstantConversion() { final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"); - final Term vLog4jTerm = new AbstractConstantImpl("c"); + final Term rulewerkTerm = new AbstractConstantImpl("c"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); - assertEquals(vLog4jTerm, convertedTerm); + assertEquals(rulewerkTerm, convertedTerm); } @Test public void testAbstractConstantIriConversion() { final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, ""); - final Term vLog4jTerm = new AbstractConstantImpl("http://example.org/test"); + final Term rulewerkTerm = new AbstractConstantImpl("http://example.org/test"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); - assertEquals(vLog4jTerm, convertedTerm); + assertEquals(rulewerkTerm, convertedTerm); } @Test public void testDatatypeConstantConversion() { final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "\"a\"^^"); - final Term vLog4jTerm = new DatatypeConstantImpl("a", "http://example.org/test"); + final Term rulewerkTerm = new DatatypeConstantImpl("a", "http://example.org/test"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); - assertEquals(vLog4jTerm, convertedTerm); + assertEquals(rulewerkTerm, convertedTerm); } @Test public void testLanguageStringConversion() { final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "\"Test\"@en"); - final Term vLog4jTerm = new LanguageStringConstantImpl("Test", "en"); + final Term rulewerkTerm = new LanguageStringConstantImpl("Test", "en"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); - assertEquals(vLog4jTerm, convertedTerm); + assertEquals(rulewerkTerm, convertedTerm); } @Test public void testNamedNullConversion() { final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_123"); - final Term vLog4jTerm = new NamedNullImpl("_123"); + final Term rulewerkTerm = new NamedNullImpl("_123"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); - assertEquals(vLog4jTerm, convertedTerm); + assertEquals(rulewerkTerm, convertedTerm); } @Test(expected = IllegalArgumentException.class) diff --git a/rulewerk-examples/README.md b/rulewerk-examples/README.md index b76702fd9..0bc31121e 100644 --- a/rulewerk-examples/README.md +++ b/rulewerk-examples/README.md @@ -1,6 +1,6 @@ -This project contains examples of different use-cases of **vlog4j** functionality. -- reasoning termination for various algorithms - **The Skolem** and **The Restricted Chase** : *SkolemVsRestrictedChaseTermination.java* -- adding facts from a **CSV file**; exporting query results to CSV: *AddDataFromCSVFile.java* -- adding facts from the result of a **SPARQL query** on a remote endpoint: *AddDataFromSparqlQueryResults.java* -- converting an **OWL ontology** into rules and facts; reasoning on an **OWL ontology** : *owlapi.OwlOntologyToRulesAndFacts.java* -- converting an **RDF resource** into facts: *rdf.AddDataFromRDFModel.java* +This project contains examples of different use-cases of **rulewerk** functionality. 
+- reasoning termination for various algorithms - **The Skolem** and **The Restricted Chase** : *SkolemVsRestrictedChaseTermination.java* +- adding facts from a **CSV file**; exporting query results to CSV: *AddDataFromCSVFile.java* +- adding facts from the result of a **SPARQL query** on a remote endpoint: *AddDataFromSparqlQueryResults.java* +- converting an **OWL ontology** into rules and facts; reasoning on an **OWL ontology** : *owlapi.OwlOntologyToRulesAndFacts.java* +- converting an **RDF resource** into facts: *rdf.AddDataFromRDFModel.java* diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java index 89cadd193..28e870fa6 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,10 +29,10 @@ import org.semanticweb.rulewerk.parser.RuleParser; /** - * This example demonstrates the basic usage of VLog4j for rule reasoning. We + * This example demonstrates the basic usage of Rulewerk for rule reasoning. We * are using a fixed set of rules and facts defined in Java without any external * sources, and we query for some of the results. - * + * * @author Markus Kroetzsch * */ diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java index 646b65f5a..2dc50ca84 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,7 +31,7 @@ import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; -import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; import fr.lirmm.graphik.graal.api.core.Atom; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; @@ -41,14 +41,14 @@ /** * This example shows how facts can be imported from files in the * DLGP/DLP format. - * + * * The Graal * {@link DlgpParser} is used to parse the program. This step requires a * {@link File}, {@link InputStream}, {@link Reader}, or {@link String} * containing or pointing to the program. 
- * + * * The {@link Atom Atoms}, {@link Rule Rules}, and {@link ConjunctiveQuery - * ConjunctiveQueries} are then converted for use by VLog4J. Take care to add + * ConjunctiveQueries} are then converted for use by Rulewerk. Take care to add * the rules resulting from the {@link ConjunctiveQuery ConjunctiveQueries} as * well as the {@link Rule Rules} to the {@link Reasoner}; see * {@link GraalConjunctiveQueryToRule} for details. @@ -66,7 +66,7 @@ public static void main(final String[] args) throws IOException { /* * 1. Parse the DLGP/DLP file using the DlgpParser. - * + * * DlgpParser supports Files, InputStreams, Readers, and Strings. While other * objects such as prefixes can also be part of the iterator, they are * automatically resolved and do not need to be handled here. @@ -86,14 +86,14 @@ public static void main(final String[] args) throws IOException { /* * 2. ConjunctiveQueries consist of a conjunction of literals and a set of - * answer variables. To query this with VLog4J, an additional rule needs to be + * answer variables. To query this with Rulewerk, an additional rule needs to be * added for each ConjunctiveQuery. See GraalConjunctiveQueryToRule for details. */ final List convertedConjunctiveQueries = new ArrayList<>(); for (final ConjunctiveQuery conjunctiveQuery : graalConjunctiveQueries) { final String queryUniqueId = "query" + convertedConjunctiveQueries.size(); - convertedConjunctiveQueries.add(GraalToVLog4JModelConverter.convertQuery(queryUniqueId, conjunctiveQuery)); + convertedConjunctiveQueries.add(GraalToRulewerkModelConverter.convertQuery(queryUniqueId, conjunctiveQuery)); } /* @@ -107,7 +107,7 @@ public static void main(final String[] args) throws IOException { /* * Add facts to the reasoner knowledge base */ - kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); + kb.addStatements(GraalToRulewerkModelConverter.convertAtomsToFacts(graalAtoms)); /* * Load the knowledge base into the reasoner */ @@ -120,7 +120,7 @@ public static void main(final String[] args) throws IOException { /* * Add rules to the reasoner knowledge base */ - kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); + kb.addStatements(GraalToRulewerkModelConverter.convertRules(graalRules)); for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { kb.addStatement(graalConjunctiveQueryToRule.getRule()); } diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java index 6000043b8..b4f05470f 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -30,7 +30,7 @@ import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; -import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; import fr.lirmm.graphik.graal.io.dlp.DlgpParser; @@ -39,10 +39,10 @@ * This example shows how facts and rules can be imported from objects of the * Graal library. Special * care must be taken with the import of Graal {@link ConjunctiveQuery}-objects, - * since unlike with VLog4J, they represent both the query atom and the + * since unlike with Rulewerk, they represent both the query atom and the * corresponding rule. *

        - * In VLog4J, the reasoner is queried by a query Atom and the results are all + * In Rulewerk, the reasoner is queried by a query Atom and the results are all * facts matching this query Atom.
        * Answering a Graal {@link ConjunctiveQuery} over a certain knowledge base is * equivalent to adding a {@link Rule} to the knowledge base, prior to @@ -53,7 +53,7 @@ * as a a query Atom to obtain the results of the Graal * {@link ConjunctiveQuery}. *

        - * + * * @author Adrian Bielefeldt * */ @@ -67,7 +67,7 @@ public static void main(final String[] args) throws IOException { /* * 1.1 Rules to map external database (EDB) predicates to internal database - * predicates (IDB). Necessary because VLog4J requires separation between input + * predicates (IDB). Necessary because Rulewerk requires separation between input * predicates and predicates for which additional facts can be derived. */ graalRules.add(DlgpParser.parseRule("bicycleIDB(X) :- bicycleEDB(X).")); @@ -114,7 +114,7 @@ public static void main(final String[] args) throws IOException { * then querying with query(?b, ?w) The rule from convertedGraalConjunctiveQuery * needs to be added to the reasoner. */ - final GraalConjunctiveQueryToRule convertedGraalConjunctiveQuery = GraalToVLog4JModelConverter.convertQuery( + final GraalConjunctiveQueryToRule convertedGraalConjunctiveQuery = GraalToRulewerkModelConverter.convertQuery( "graalQuery", DlgpParser.parseQuery("?(B, W) :- bicycleIDB(B), wheelIDB(W), isPartOfIDB(W, B).")); /* @@ -128,7 +128,7 @@ public static void main(final String[] args) throws IOException { /* * Add facts to the reasoner knowledge base */ - kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); + kb.addStatements(GraalToRulewerkModelConverter.convertAtomsToFacts(graalAtoms)); /* * Load the knowledge base into the reasoner */ @@ -143,7 +143,7 @@ public static void main(final String[] args) throws IOException { /* * Add rules to the reasoner knowledge base */ - kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); + kb.addStatements(GraalToRulewerkModelConverter.convertRules(graalRules)); kb.addStatements(convertedGraalConjunctiveQuery.getRule()); /* diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java index 774b10265..6c1e9f19d 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -38,7 +38,7 @@ import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; import org.semanticweb.rulewerk.examples.DoidExample; import org.semanticweb.rulewerk.examples.ExamplesUtils; -import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; import fr.lirmm.graphik.graal.io.dlp.DlgpParser; @@ -100,7 +100,7 @@ public static void main(final String[] args) throws IOException { final Object object = parser.next(); if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { kb.addStatement( - GraalToVLog4JModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); + GraalToRulewerkModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); } } } diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java index dad6f26f1..5bcb6bea0 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -43,10 +43,10 @@ import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; /** - * This example shows how vlog4j-owlapi library (class + * This example shows how rulewerk-owlapi library (class * {@link OwlToRulesConverter}) can be used to transform an OWL ontology into - * vlog4j-core {@link Rule}s and {@link Fact}s. - * + * rulewerk-core {@link Rule}s and {@link Fact}s. + * * @author Irina Dragoste * */ @@ -60,7 +60,7 @@ public static void main(final String[] args) throws OWLOntologyCreationException .loadOntologyFromOntologyDocument(new File(ExamplesUtils.INPUT_FOLDER + "owl/bike.owl")); /* - * vlog4j.owlapi.OwlToRulesConverter can be used to convert the OWL axiom in + * rulewerk.owlapi.OwlToRulesConverter can be used to convert the OWL axiom in * source ontology to target Rule and Atom objects */ final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); @@ -105,7 +105,7 @@ public static void main(final String[] args) throws OWLOntologyCreationException /* * See that an unnamed individual has been introduced to satisfy * owl:someValuesFrom restriction: - * + * * :Bike rdf:type owl:Class ; rdfs:subClassOf [ rdf:type owl:Restriction ; * owl:onProperty :hasPart ; owl:someValuesFrom :Wheel ] . */ diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java index 251f89777..77af29d19 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -52,10 +52,10 @@ import org.semanticweb.rulewerk.rdf.RdfModelConverter; /** - * This example shows how vlog4j-rdf library's utility class + * This example shows how rulewerk-rdf library's utility class * {@link RdfModelConverter} can be used to convert RDF {@link Model}s from - * various types of RDF resources to vlog4j-core {@code Atom} sets. - * + * various types of RDF resources to rulewerk-core {@code Atom} sets. + * * @author Irina Dragoste * */ @@ -77,7 +77,7 @@ public static void main(final String[] args) RDFFormat.RDFXML); /* - * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having + * Using rulewerk-rdf library, we convert RDF Model triples to facts, each having * the ternary predicate "TRIPLE". */ final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2016); @@ -96,7 +96,7 @@ public static void main(final String[] args) RDFFormat.TURTLE); /* - * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having + * Using rulewerk-rdf library, we convert RDF Model triples to facts, each having * the ternary predicate "TRIPLE". */ final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2017); @@ -156,7 +156,7 @@ public static void main(final String[] args) /** * Parses the data from the supplied InputStream, using the supplied baseURI to * resolve any relative URI references. - * + * * @param inputStream The content to be parsed, expected to be in the given * {@code rdfFormat}. * @param baseURI The URI associated with the data in the InputStream. diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java index 0c077263e..a99563a3f 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,14 +20,16 @@ * #L% */ +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; + /** - * An exception to signify that a conversion from Graal data structures to VLog4J data structures could not + * An exception to signify that a conversion from Graal data structures to Rulewerk data structures could not * be made. 
- * + * * @author Adrian Bielefeldt * */ -public class GraalConvertException extends RuntimeException { +public class GraalConvertException extends RulewerkRuntimeException { /** * generated serial version UID diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java similarity index 87% rename from rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java rename to rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java index f23cb6042..5ab82d428 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -43,28 +43,28 @@ /** * Utility class to convert * Graal data structures into - * VLog4J data structures. Labels ({@link ConjunctiveQuery#getLabel()}, + * Rulewerk data structures. Labels ({@link ConjunctiveQuery#getLabel()}, * {@link fr.lirmm.graphik.graal.api.core.Rule#getLabel() Rule.getLabel()}, or * {@link fr.lirmm.graphik.graal.api.core.Term#getLabel() Term.getLabel()}) are - * not converted since VLog4J does not support them. + * not converted since Rulewerk does not support them. * * @author Adrian Bielefeldt * */ -public final class GraalToVLog4JModelConverter { +public final class GraalToRulewerkModelConverter { - private GraalToVLog4JModelConverter() { + private GraalToRulewerkModelConverter() { } /** * Converts a {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} into a - * {@link PositiveLiteral VLog4J PositiveLiteral}. + * {@link PositiveLiteral Rulewerk PositiveLiteral}. * * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom * Graal Atom} * @param existentialVariables set of variables that are existentially * quantified - * @return A {@link PositiveLiteral VLog4J PositiveLiteral} + * @return A {@link PositiveLiteral Rulewerk PositiveLiteral} */ public static PositiveLiteral convertAtom(final fr.lirmm.graphik.graal.api.core.Atom atom, final Set existentialVariables) { @@ -75,10 +75,10 @@ public static PositiveLiteral convertAtom(final fr.lirmm.graphik.graal.api.core. /** * Converts a {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} into a - * {@link Fact VLog4J fact}. + * {@link Fact Rulewerk fact}. * * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} - * @return A {@link Fact VLog4J fact} + * @return A {@link Fact Rulewerk fact} * @throws IllegalArgumentException if the converted atom contains terms that * cannot occur in facts */ @@ -90,12 +90,12 @@ public static Fact convertAtomToFact(final fr.lirmm.graphik.graal.api.core.Atom /** * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Atom Graal - * Atoms} into a {@link List} of {@link PositiveLiteral VLog4J + * Atoms} into a {@link List} of {@link PositiveLiteral Rulewerk * PositiveLiterals}. 
* * @param atoms list of {@link fr.lirmm.graphik.graal.api.core.Atom Graal * Atoms}. - * @return A {@link List} of {@link PositiveLiteral VLog4J PositiveLiterals}. + * @return A {@link List} of {@link PositiveLiteral Rulewerk PositiveLiterals}. */ public static List convertAtoms(final List atoms) { final List result = new ArrayList<>(); @@ -107,11 +107,11 @@ public static List convertAtoms(final List convertAtomsToFacts(final List atoms) { final List result = new ArrayList<>(); @@ -122,13 +122,13 @@ public static List convertAtomsToFacts(final List convertAtomSet(final AtomSet atomSet, final Set existentialVariables) { @@ -146,11 +146,11 @@ private static Conjunction convertAtomSet(final AtomSet atomSet /** * Converts a {@link fr.lirmm.graphik.graal.api.core.Predicate Graal Predicate} - * into a {@link Predicate VLog4J Predicate}. + * into a {@link Predicate Rulewerk Predicate}. * * @param predicate A {@link fr.lirmm.graphik.graal.api.core.Predicate Graal * Predicate} - * @return A {@link Predicate VLog4J Predicate} + * @return A {@link Predicate Rulewerk Predicate} */ private static Predicate convertPredicate(final fr.lirmm.graphik.graal.api.core.Predicate predicate) { return Expressions.makePredicate(predicate.getIdentifier().toString(), predicate.getArity()); @@ -190,12 +190,12 @@ public static GraalConjunctiveQueryToRule convertQuery(final String ruleHeadPred if (conjunctiveQuery.getAtomSet().isEmpty()) { throw new GraalConvertException(MessageFormat.format( - "Graal ConjunctiveQuery {0} with empty body is not supported in VLog4j.", conjunctiveQuery)); + "Graal ConjunctiveQuery {0} with empty body is not supported in Rulewerk.", conjunctiveQuery)); } if (conjunctiveQuery.getAnswerVariables().isEmpty()) { throw new GraalConvertException(MessageFormat.format( - "Graal ConjunctiveQuery {0} with no answer variables is not supported in VLog4J.", + "Graal ConjunctiveQuery {0} with no answer variables is not supported in Rulewerk.", conjunctiveQuery)); } @@ -208,10 +208,10 @@ public static GraalConjunctiveQueryToRule convertQuery(final String ruleHeadPred /** * Converts a {@link fr.lirmm.graphik.graal.api.core.Rule Graal Rule} into a - * {@link Rule Vlog4J Rule}. + * {@link Rule Rulewerk Rule}. * * @param rule A {@link fr.lirmm.graphik.graal.api.core.Rule Graal Rule}. - * @return A {@link Rule Vlog4J Rule}. + * @return A {@link Rule Rulewerk Rule}. */ public static Rule convertRule(final fr.lirmm.graphik.graal.api.core.Rule rule) { final Conjunction head = convertAtomSet(rule.getHead(), rule.getExistentials()); @@ -221,11 +221,11 @@ public static Rule convertRule(final fr.lirmm.graphik.graal.api.core.Rule rule) /** * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Rule Graal - * Rules} into a {@link List} of {@link Rule VLog4J Rules}. + * Rules} into a {@link List} of {@link Rule Rulewerk Rules}. * * @param rules A {@link List} of {@link fr.lirmm.graphik.graal.api.core.Rule * Graal Rules}. - * @return A {@link List} of {@link Rule VLog4J Rules}. + * @return A {@link List} of {@link Rule Rulewerk Rules}. */ public static List convertRules(final List rules) { final List result = new ArrayList<>(); @@ -237,7 +237,7 @@ public static List convertRules(final List convertRules(final List, if it * is a Constant.
        * Graal Variable with identifier "a" will be transformed to - * vlog4j Variable with name "a". Graal Constant with identifier - * "c" will be transformed to vlog4j Constant with name + * rulewerk Variable with name "a". Graal Constant with identifier + * "c" will be transformed to rulewerk Constant with name * "<c>". * * @throws GraalConvertException If the term is neither variable nor constant. @@ -277,11 +277,11 @@ private static Term convertTerm(final fr.lirmm.graphik.graal.api.core.Term term, /** * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Term Graal - * Terms} into a {@link List} of {@link Term VLog4J Terms}. + * Terms} into a {@link List} of {@link Term Rulewerk Terms}. * * @param terms A {@link List} of {@link fr.lirmm.graphik.graal.api.core.Term * Graal Terms} - * @return A {@link List} of {@link Term VLog4J Terms} + * @return A {@link List} of {@link Term Rulewerk Terms} */ private static List convertTerms(final List terms, final Set existentialVariables) { diff --git a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java similarity index 69% rename from rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java rename to rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java index e522de86d..b6c3cd88b 100644 --- a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java +++ b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -46,7 +46,7 @@ /** * @author Adrian Bielefeldt */ -public class GraalToVLog4JModelConverterTest { +public class GraalToRulewerkModelConverterTest { private final String socrate = "socrate"; private final String redsBike = "redsBike"; @@ -61,18 +61,18 @@ public class GraalToVLog4JModelConverterTest { private final String y = "Y"; private final String z = "Z"; - private final Constant vlog4j_socrate = Expressions.makeAbstractConstant(this.socrate); + private final Constant rulewerk_socrate = Expressions.makeAbstractConstant(this.socrate); - private final Predicate vlog4j_bicycle = Expressions.makePredicate(this.bicycle, 1); - private final Predicate vlog4j_hasPart = Expressions.makePredicate(this.hasPart, 2); - private final Predicate vlog4j_human = Expressions.makePredicate(this.human, 1); - private final Predicate vlog4j_mortal = Expressions.makePredicate(this.mortal, 1); - private final Predicate vlog4j_wheel = Expressions.makePredicate(this.wheel, 1); + private final Predicate rulewerk_bicycle = Expressions.makePredicate(this.bicycle, 1); + private final Predicate rulewerk_hasPart = Expressions.makePredicate(this.hasPart, 2); + private final Predicate rulewerk_human = Expressions.makePredicate(this.human, 1); + private final Predicate rulewerk_mortal = Expressions.makePredicate(this.mortal, 1); + private final Predicate rulewerk_wheel = Expressions.makePredicate(this.wheel, 1); - private final Variable vlog4j_x = Expressions.makeUniversalVariable(this.x); - private final Variable vlog4j_y = Expressions.makeUniversalVariable(this.y); - private final Variable vlog4j_z = Expressions.makeUniversalVariable(this.z); - private final Variable vlog4j_ex_y = Expressions.makeExistentialVariable(this.y); + private final Variable rulewerk_x = Expressions.makeUniversalVariable(this.x); + private final Variable rulewerk_y = Expressions.makeUniversalVariable(this.y); + private final Variable rulewerk_z = Expressions.makeUniversalVariable(this.z); + private final Variable rulewerk_ex_y = Expressions.makeExistentialVariable(this.y); private final DefaultTermFactory termFactory = new DefaultTermFactory(); @@ -98,50 +98,50 @@ public class GraalToVLog4JModelConverterTest { @Test public void testConvertAtom() throws ParseException { - final PositiveLiteral vlog4j_atom = Expressions.makePositiveLiteral(this.vlog4j_human, this.vlog4j_socrate); + final PositiveLiteral rulewerk_atom = Expressions.makePositiveLiteral(this.rulewerk_human, this.rulewerk_socrate); final fr.lirmm.graphik.graal.api.core.Atom graal_atom = new DefaultAtom(this.graal_human, this.graal_socrate); - assertEquals(vlog4j_atom, GraalToVLog4JModelConverter.convertAtom(graal_atom, Collections.emptySet())); + assertEquals(rulewerk_atom, GraalToRulewerkModelConverter.convertAtom(graal_atom, Collections.emptySet())); - final PositiveLiteral vlog4j_atom_2 = Expressions.makePositiveLiteral(this.vlog4j_hasPart, this.vlog4j_x, - this.vlog4j_socrate); + final PositiveLiteral rulewerk_atom_2 = Expressions.makePositiveLiteral(this.rulewerk_hasPart, this.rulewerk_x, + this.rulewerk_socrate); final fr.lirmm.graphik.graal.api.core.Atom graal_atom_2 = new DefaultAtom(this.graal_hasPart, this.graal_x, this.graal_socrate); - assertEquals(vlog4j_atom_2, GraalToVLog4JModelConverter.convertAtom(graal_atom_2, Collections.emptySet())); + assertEquals(rulewerk_atom_2, GraalToRulewerkModelConverter.convertAtom(graal_atom_2, Collections.emptySet())); } @Test public void testConvertFact() throws ParseException { - final Fact vlog4j_atom = 
Expressions.makeFact(this.vlog4j_human, Arrays.asList(this.vlog4j_socrate)); + final Fact rulewerk_atom = Expressions.makeFact(this.rulewerk_human, Arrays.asList(this.rulewerk_socrate)); final fr.lirmm.graphik.graal.api.core.Atom graal_atom = new DefaultAtom(this.graal_human, this.graal_socrate); - assertEquals(vlog4j_atom, GraalToVLog4JModelConverter.convertAtomToFact(graal_atom)); + assertEquals(rulewerk_atom, GraalToRulewerkModelConverter.convertAtomToFact(graal_atom)); } @Test public void testConvertRule() throws ParseException { // moral(X) :- human(X) - final PositiveLiteral vlog4j_mortal_atom = Expressions.makePositiveLiteral(this.vlog4j_mortal, this.vlog4j_x); - final PositiveLiteral vlog4j_human_atom = Expressions.makePositiveLiteral(this.vlog4j_human, this.vlog4j_x); - final Rule vlog4j_rule = Expressions.makeRule(vlog4j_mortal_atom, vlog4j_human_atom); + final PositiveLiteral rulewerk_mortal_atom = Expressions.makePositiveLiteral(this.rulewerk_mortal, this.rulewerk_x); + final PositiveLiteral rulewerk_human_atom = Expressions.makePositiveLiteral(this.rulewerk_human, this.rulewerk_x); + final Rule rulewerk_rule = Expressions.makeRule(rulewerk_mortal_atom, rulewerk_human_atom); final fr.lirmm.graphik.graal.api.core.Atom graal_mortal_atom = new DefaultAtom(this.graal_mortal, this.graal_x); final fr.lirmm.graphik.graal.api.core.Atom graal_human_atom = new DefaultAtom(this.graal_human, this.graal_x); final fr.lirmm.graphik.graal.api.core.Rule graal_rule = new DefaultRule(new LinkedListAtomSet(graal_human_atom), new LinkedListAtomSet(graal_mortal_atom)); - assertEquals(vlog4j_rule, GraalToVLog4JModelConverter.convertRule(graal_rule)); + assertEquals(rulewerk_rule, GraalToRulewerkModelConverter.convertRule(graal_rule)); } @Test public void testConvertExistentialRule() throws ParseException { // hasPart(X, Y), wheel(Y) :- bicycle(X) - final PositiveLiteral vlog4j_hasPart_atom = Expressions.makePositiveLiteral(this.vlog4j_hasPart, this.vlog4j_x, - this.vlog4j_ex_y); - final PositiveLiteral vlog4j_wheel_atom = Expressions.makePositiveLiteral(this.vlog4j_wheel, this.vlog4j_ex_y); - final PositiveLiteral vlog4j_bicycle_atom = Expressions.makePositiveLiteral(this.vlog4j_bicycle, this.vlog4j_x); - final Rule vlog4j_rule = Expressions.makeRule( - Expressions.makePositiveConjunction(vlog4j_hasPart_atom, vlog4j_wheel_atom), - Expressions.makeConjunction(vlog4j_bicycle_atom)); + final PositiveLiteral rulewerk_hasPart_atom = Expressions.makePositiveLiteral(this.rulewerk_hasPart, this.rulewerk_x, + this.rulewerk_ex_y); + final PositiveLiteral rulewerk_wheel_atom = Expressions.makePositiveLiteral(this.rulewerk_wheel, this.rulewerk_ex_y); + final PositiveLiteral rulewerk_bicycle_atom = Expressions.makePositiveLiteral(this.rulewerk_bicycle, this.rulewerk_x); + final Rule rulewerk_rule = Expressions.makeRule( + Expressions.makePositiveConjunction(rulewerk_hasPart_atom, rulewerk_wheel_atom), + Expressions.makeConjunction(rulewerk_bicycle_atom)); final fr.lirmm.graphik.graal.api.core.Atom graal_hasPart_atom = new DefaultAtom(this.graal_hasPart, this.graal_x, this.graal_y); @@ -151,7 +151,7 @@ public void testConvertExistentialRule() throws ParseException { final fr.lirmm.graphik.graal.api.core.Rule graal_rule = new DefaultRule( new LinkedListAtomSet(graal_bicycle_atom), new LinkedListAtomSet(graal_hasPart_atom, graal_wheel_atom)); - assertEquals(vlog4j_rule, GraalToVLog4JModelConverter.convertRule(graal_rule)); + assertEquals(rulewerk_rule, GraalToRulewerkModelConverter.convertRule(graal_rule)); } @Test 
@@ -159,16 +159,16 @@ public void testConvertQuery() throws ParseException { // ?(X) :- mortal(X) final String mortalQuery = "mortalQuery"; final PositiveLiteral query = Expressions.makePositiveLiteral(Expressions.makePredicate(mortalQuery, 1), - this.vlog4j_x); + this.rulewerk_x); final Rule queryRule = Expressions.makeRule(query, - Expressions.makePositiveLiteral(this.vlog4j_mortal, this.vlog4j_x)); + Expressions.makePositiveLiteral(this.rulewerk_mortal, this.rulewerk_x)); final fr.lirmm.graphik.graal.api.core.Atom graal_query_atom = new DefaultAtom(this.graal_mortal, this.graal_x); final ConjunctiveQuery graal_query = new DefaultConjunctiveQuery(new LinkedListAtomSet(graal_query_atom), Arrays.asList(this.graal_x)); - final GraalConjunctiveQueryToRule importedQuery = GraalToVLog4JModelConverter.convertQuery(mortalQuery, + final GraalConjunctiveQueryToRule importedQuery = GraalToRulewerkModelConverter.convertQuery(mortalQuery, graal_query); assertEquals(query, importedQuery.getQuery()); assertEquals(queryRule, importedQuery.getRule()); @@ -204,21 +204,21 @@ public void testConvertQuery() throws ParseException { graal_predicate4_atom), Arrays.asList(this.graal_x, this.graal_x, this.graal_y)); - final GraalConjunctiveQueryToRule importedComplexQuery = GraalToVLog4JModelConverter.convertQuery(complexQuery, + final GraalConjunctiveQueryToRule importedComplexQuery = GraalToRulewerkModelConverter.convertQuery(complexQuery, graal_complex_query); final PositiveLiteral expectedComplexQueryAtom = Expressions.makePositiveLiteral( - Expressions.makePredicate(complexQuery, 3), this.vlog4j_x, this.vlog4j_x, this.vlog4j_y); - final PositiveLiteral vlog4j_predicate1_atom = Expressions - .makePositiveLiteral(Expressions.makePredicate(predicate1, 1), this.vlog4j_x); - final PositiveLiteral vlog4j_predicate2_atom = Expressions - .makePositiveLiteral(Expressions.makePredicate(predicate2, 2), this.vlog4j_y, this.vlog4j_x); - final PositiveLiteral vlog4j_predicate3_atom = Expressions.makePositiveLiteral( - Expressions.makePredicate(predicate3, 2), this.vlog4j_y, Expressions.makeAbstractConstant(stockholm)); - final PositiveLiteral vlog4j_predicate4_atom = Expressions.makePositiveLiteral( - Expressions.makePredicate(predicate4, 3), this.vlog4j_x, this.vlog4j_y, this.vlog4j_z); - final Rule expectedComplexQueryRule = Expressions.makeRule(expectedComplexQueryAtom, vlog4j_predicate1_atom, - vlog4j_predicate2_atom, vlog4j_predicate3_atom, vlog4j_predicate4_atom); + Expressions.makePredicate(complexQuery, 3), this.rulewerk_x, this.rulewerk_x, this.rulewerk_y); + final PositiveLiteral rulewerk_predicate1_atom = Expressions + .makePositiveLiteral(Expressions.makePredicate(predicate1, 1), this.rulewerk_x); + final PositiveLiteral rulewerk_predicate2_atom = Expressions + .makePositiveLiteral(Expressions.makePredicate(predicate2, 2), this.rulewerk_y, this.rulewerk_x); + final PositiveLiteral rulewerk_predicate3_atom = Expressions.makePositiveLiteral( + Expressions.makePredicate(predicate3, 2), this.rulewerk_y, Expressions.makeAbstractConstant(stockholm)); + final PositiveLiteral rulewerk_predicate4_atom = Expressions.makePositiveLiteral( + Expressions.makePredicate(predicate4, 3), this.rulewerk_x, this.rulewerk_y, this.rulewerk_z); + final Rule expectedComplexQueryRule = Expressions.makeRule(expectedComplexQueryAtom, rulewerk_predicate1_atom, + rulewerk_predicate2_atom, rulewerk_predicate3_atom, rulewerk_predicate4_atom); assertEquals(expectedComplexQueryAtom, importedComplexQuery.getQuery()); @@ -232,7 +232,7 @@ public 
void testConvertQueryExceptionNoVariables() { this.graal_socrate); final ConjunctiveQuery graal_query_without_answer_variables = new DefaultConjunctiveQuery( new LinkedListAtomSet(graal_atom), new ArrayList<>()); - GraalToVLog4JModelConverter.convertQuery("name", graal_query_without_answer_variables); + GraalToRulewerkModelConverter.convertQuery("name", graal_query_without_answer_variables); } @Test(expected = GraalConvertException.class) @@ -240,7 +240,7 @@ public void testConvertQueryExceptionEmptyBody() { final ConjunctiveQuery graal_query_without_body = new DefaultConjunctiveQuery(new LinkedListAtomSet(), Arrays.asList(this.graal_y)); - GraalToVLog4JModelConverter.convertQuery("name", graal_query_without_body); + GraalToRulewerkModelConverter.convertQuery("name", graal_query_without_body); } @Test(expected = GraalConvertException.class) @@ -252,6 +252,6 @@ public void testConvertQueryExceptionBlankPredicate() { final ConjunctiveQuery graal_query = new DefaultConjunctiveQuery( new LinkedListAtomSet(graal_atom_1, graal_atom_2), Arrays.asList(this.graal_z)); - GraalToVLog4JModelConverter.convertQuery(" ", graal_query); + GraalToRulewerkModelConverter.convertQuery(" ", graal_query); } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index ee2a687f1..5843f1db7 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -78,7 +78,7 @@ public class ParserConfiguration { * production of the rules grammar, corresponding to some {@link DataSource} * type. * - * @see + * @see * the grammar. * * @param name Name of the data source, as it appears in the declaring diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java index abca75072..802cafe03 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,9 +20,9 @@ * #L% */ -import org.semanticweb.rulewerk.core.exceptions.VLog4jException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; -public class ParsingException extends VLog4jException { +public class ParsingException extends RulewerkException { private static final long serialVersionUID = 2849123381757026724L; public ParsingException(String message) { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index b630085bb..0d633b1ad 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -1,8 +1,8 @@ /*- * #%L - * vlog4j-parser + * rulewerk-parser * %% - * Copyright (C) 2018 - 2020 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. From a7ad0ab286d53d85eb69768030e9688c162bf404 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 15:41:40 +0100 Subject: [PATCH 0569/1003] Make sure all exceptions extend the appropriate superclass Fixup --- .../exceptions/RulewerkRuntimeException.java | 8 ++ .../core/model/implementation/Serializer.java | 13 ++-- .../reasoner/implementation/VLogReasoner.java | 73 ++++++++++--------- .../implementation/VLogReasonerNegation.java | 7 +- .../VLogReasonerSparqlInput.java | 17 +++-- .../rulewerk/examples/ExamplesUtils.java | 9 ++- .../owlapi/OwlAxiomToRulesConverter.java | 31 ++++---- .../OwlFeatureNotSupportedException.java | 14 ++-- .../owlapi/OwlToRulesConversionHelper.java | 35 +++++---- .../rulewerk/rdf/RdfValueToTermConverter.java | 7 +- 10 files changed, 112 insertions(+), 102 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java index 609e0f882..9ff8dca3a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java @@ -33,6 +33,10 @@ public class RulewerkRuntimeException extends RuntimeException { */ private static final long serialVersionUID = -6574826887294416900L; + public RulewerkRuntimeException(Throwable cause) { + super(cause); + } + public RulewerkRuntimeException(String message, Throwable cause) { super(message, cause); } @@ -40,4 +44,8 @@ public RulewerkRuntimeException(String message, Throwable cause) { public RulewerkRuntimeException(String message) { super(message); } + + public RulewerkRuntimeException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index bbccf9e1b..0a2668cd9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -1,12 +1,5 @@ package 
org.semanticweb.rulewerk.core.model.implementation; -import java.util.List; -import java.util.Map.Entry; -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; - /*- * #%L * Rulewerk Core Components @@ -27,8 +20,12 @@ * #L% */ +import java.util.List; +import java.util.Map.Entry; +import java.util.function.Function; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.Conjunction; -import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java index bfc8ab05a..00b774ef0 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; @@ -12,6 +32,7 @@ import org.apache.commons.lang3.Validate; import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -46,26 +67,6 @@ import karmaresearch.vlog.VLog; import karmaresearch.vlog.VLog.CyclicCheckResult; -/* - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - /** * Reasoner implementation using the VLog backend. 
* @@ -205,9 +206,9 @@ void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { try { this.vLog.start(vLogKB.getVLogDataSourcesConfigurationString(), false); } catch (final AlreadyStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); + throw new RulewerkRuntimeException("Invalid data sources configuration.", e); } } @@ -224,7 +225,7 @@ void loadInMemoryDataSource(final DataSource dataSource, final Predicate predica try { load(predicate, inMemoryDataSource); } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); + throw new RulewerkRuntimeException("Invalid data sources configuration!", e); } } } @@ -279,7 +280,7 @@ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); } } @@ -299,7 +300,7 @@ void loadFacts(final VLogKnowledgeBase vLogKB) { } } } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); + throw new RulewerkRuntimeException("Invalid data sources configuration!", e); } }); @@ -317,7 +318,7 @@ void loadRules(final VLogKnowledgeBase vLogKB) { } } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); } } @@ -361,11 +362,11 @@ private void runChase() { this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final MaterializationException e) { // FIXME: the message generated here is not guaranteed to be the correct // interpretation of the exception that is caught - throw new RuntimeException( + throw new RulewerkRuntimeException( "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", e); } @@ -390,7 +391,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul try { stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. 
Answer must be empty!"); @@ -412,7 +413,7 @@ public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean include try { result = this.vLog.querySize(vLogAtom, true, filterBlanks); } catch (NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (NonExistingPredicateException e) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. Answer must be empty!"); @@ -435,7 +436,7 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St try { this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. Answers are therefore empty."); @@ -475,9 +476,9 @@ public Correctness writeInferences(OutputStream stream) throws IOException { .getBytes()); } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { - throw new RuntimeException("Inconsistent knowledge base state.", e1); + throw new RulewerkRuntimeException("Inconsistent knowledge base state.", e1); } } @@ -562,7 +563,7 @@ public boolean isMFC() { try { checkCyclic = this.vLog.checkCyclic("MFC"); } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible + throw new RulewerkRuntimeException(e.getMessage(), e); // should be impossible } return checkCyclic.equals(CyclicCheckResult.CYCLIC); } @@ -643,7 +644,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { try { load(); } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/rulewerk/issues/128 - throw new RuntimeException(e); + throw new RulewerkRuntimeException(e); } } @@ -651,7 +652,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { try { checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible + throw new RulewerkRuntimeException(e.getMessage(), e); // should be impossible } return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java index ecbc3ab07..39f802725 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,6 +28,7 @@ import java.util.Arrays; import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Literal; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -59,7 +60,7 @@ public class VLogReasonerNegation { private final Fact pEF = Expressions.makeFact("P", Arrays.asList(e, f)); private final Fact qCD = Expressions.makeFact("Q", Arrays.asList(c, d)); - @Test(expected = RuntimeException.class) + @Test(expected = RulewerkRuntimeException.class) public void testNotStratifiable() throws IOException { final PositiveLiteral qXY = Expressions.makePositiveLiteral("Q", x, y); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java index b95c1b005..238d488a1 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -1,8 +1,5 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - /*- * #%L * Rulewerk Core Components @@ -12,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -23,6 +20,9 @@ * #L% */ +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.net.URL; import java.util.Arrays; @@ -31,6 +31,7 @@ import org.junit.Ignore; import org.junit.Test; import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Variable; @@ -43,7 +44,7 @@ public class VLogReasonerSparqlInput { /** * Tests the query "SELECT ?b ?a WHERE {?a p:P22 ?b}" - * + * * @throws ReasonerStateException * @throws EdbIdbSeparationException * @throws IOException @@ -104,7 +105,7 @@ public void testSimpleSparqlQueryHttps() throws IOException { /** * Tests the query "SELECT ?b ?a WHERE {?a p:P22 ?b .}" - * + * * @throws ReasonerStateException * @throws EdbIdbSeparationException * @throws IOException @@ -136,7 +137,7 @@ public void testSimpleSparqlQuery2() throws IOException { } @Ignore // Ignored during CI because it makes lengthy calls to remote servers - @Test(expected = RuntimeException.class) + @Test(expected = RulewerkRuntimeException.class) public void testConjunctiveQueryNewLineCharacterInQueryBody() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index f379970ba..5149abc41 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,6 +29,7 @@ import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; @@ -58,7 +59,7 @@ private ExamplesUtils() { * restrict the logging messages that are shown on the console or to change * their formatting. See the documentation of Log4J for details on how to do * this. - * + * * Note: The VLog C++ backend performs its own logging. The log-level for this * can be configured using * {@link Reasoner#setLogLevel(org.semanticweb.rulewerk.core.reasoner.LogLevel)}. 
@@ -107,7 +108,7 @@ public static void printOutQueryAnswers(final String queryString, final Reasoner final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); printOutQueryAnswers(query, reasoner); } catch (final ParsingException e) { - throw new RuntimeException(e.getMessage(), e); + throw new RulewerkRuntimeException(e.getMessage(), e); } } diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java index 6564a0a8c..2f10b7099 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -1,14 +1,5 @@ package org.semanticweb.rulewerk.owlapi; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -import org.semanticweb.owlapi.apibinding.OWLManager; - /*- * #%L * Rulewerk OWL API Support @@ -18,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,6 +20,14 @@ * #L% */ +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLAxiomVisitor; import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; @@ -127,7 +126,7 @@ Variable getFreshExistentialVariable() { * simply dropped. Formulas that have only positive atoms (empty body) are * transformed into one or more facts. All other cases lead to a single rule * being added. - * + * * @param converter */ void addRule(final AbstractClassToRuleConverter converter) { @@ -167,7 +166,7 @@ Term replaceTerm(Term term, Term oldTerm, Term newTerm) { PositiveLiteralImpl makeTermReplacedLiteral(Literal literal, Term oldTerm, Term newTerm) { if (literal.isNegated()) { - throw new RuntimeException("Nonmonotonic negation of literals is not handled in OWL conversion."); + throw new OwlFeatureNotSupportedException("Nonmonotonic negation of literals is not handled in OWL conversion."); } return new PositiveLiteralImpl(literal.getPredicate(), literal.getTerms().map(term -> replaceTerm(term, oldTerm, newTerm)).collect(Collectors.toList())); @@ -178,12 +177,12 @@ PositiveLiteralImpl makeTermReplacedLiteral(Literal literal, Term oldTerm, Term * rules are renamings of class expressions, based on auxiliary class names * (unary predicates). The given term is the term used in this auxiliary * predicate. - * + * * Variables used in auxiliary atoms can be existentially quantified, but the * corresponding variable in auxiliary rules must always be universally * quantified. Therefore, if the given term is an existential variable, the * method will replace it by a universal one of the same name. 
- * + * * @param head * @param body * @param auxTerm @@ -216,7 +215,7 @@ void startAxiomConversion() { * buffers, and finally creating a rule from the collected body and head. The * conversions may lead to auxiliary rules being created during processing, so * additional rules besides the one that is added here might be created. - * + * * @param subClass * @param superClass */ diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java index 4f1841ebc..06f02adca 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,23 +20,25 @@ * #L% */ +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; + /** * Exception that indicates that the translation of OWL into rules has failed * due to an expressive feature of OWL that cannot be captured in rules. - * + * * @author Markus Krötzsch * */ -public class OwlFeatureNotSupportedException extends RuntimeException { +public class OwlFeatureNotSupportedException extends RulewerkRuntimeException { /** - * + * */ private static final long serialVersionUID = -194716185012512419L; /** * Creates a new exception. - * + * * @param cause * message explaining the error */ diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index aa02ca07a..43c4c1acb 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -1,14 +1,5 @@ package org.semanticweb.rulewerk.owlapi; -import java.io.UnsupportedEncodingException; -import java.math.BigInteger; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.Arrays; -import java.util.Collection; - -import org.semanticweb.owlapi.model.OWLAnonymousIndividual; - /*- * #%L * Rulewerk OWL API Support @@ -18,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,20 +19,28 @@ * limitations under the License. 
* #L% */ +import java.io.UnsupportedEncodingException; +import java.math.BigInteger; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.Collection; +import org.semanticweb.owlapi.model.OWLAnonymousIndividual; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.FactImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; import org.semanticweb.rulewerk.owlapi.AbstractClassToRuleConverter.SimpleConjunction; @@ -49,7 +48,7 @@ /** * Utility class for helper functions that are used to convert OWL API objects * to rules. - * + * * @author Markus Kroetzsch * */ @@ -57,7 +56,7 @@ public class OwlToRulesConversionHelper { /** * Returns a {@link Term} to represent an {@link OWLIndividual} in rules. - * + * * @param owlIndividual the individual to get a term for * @return a suitable term */ @@ -74,7 +73,7 @@ public static Term getIndividualTerm(final OWLIndividual owlIndividual) { /** * Returns a {@link Predicate} to represent an {@link OWLClass} in rules. - * + * * @param owlClass the atomic class to get a predicate for * @return a suitable unary predicate */ @@ -85,7 +84,7 @@ public static Predicate getClassPredicate(final OWLClass owlClass) { /** * Returns a {@link Predicate} to represent an {@link OWLObjectProperty} in * rules. 
- * + * * @param owlObjectProperty the atomic property to get a predicate for * @return a suitable binary predicate */ @@ -104,7 +103,7 @@ public static Predicate getAuxiliaryClassPredicate(final Collection Date: Mon, 2 Mar 2020 15:42:04 +0100 Subject: [PATCH 0570/1003] Fix javadoc --- pom.xml | 2 +- .../model/api/PrefixDeclarationRegistry.java | 8 +-- .../rulewerk/core/reasoner/Reasoner.java | 49 ++++++++++--------- .../LocalPrefixDeclarationRegistry.java | 12 ++++- 4 files changed, 41 insertions(+), 30 deletions(-) diff --git a/pom.xml b/pom.xml index 8ec79cb32..aa884b190 100644 --- a/pom.xml +++ b/pom.xml @@ -315,7 +315,7 @@ ${maven.javadoc.version} 1.8 - Rulewerk homepage]]> + Rulewerk homepage]]> diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 1d24f1daa..b9dc7386a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -87,9 +87,9 @@ public interface PrefixDeclarationRegistry extends Iterable @@ -387,11 +388,13 @@ default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { * explicit facts materialised by the reasoner.
        * An answer to the query is the term set of a fact that matches the * {@code query}: the fact predicate is the same as the {@code query} predicate, - * the {@link TermType#CONSTANT} terms of the {@code query} appear in the answer - * fact at the same term position, and the {@link TermType#VARIABLE} terms of - * the {@code query} are matched by terms in the fact, either named - * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The - * same variable name identifies the same term in the answer fact.
        + * the {@link TermType#ABSTRACT_CONSTANT}, {@link TermType#DATATYPE_CONSTANT} + * and {@link TermType#LANGSTRING_CONSTANT} terms of the {@code query} appear in + * the answer fact at the same term position, and the + * {@link TermType#UNIVERSAL_VARIABLE} terms of the {@code query} are matched by + * terms in the fact, either named (any of the three constant types) or + * anonymous ({@link TermType#NAMED_NULL}). The same variable name identifies + * the same term in the answer fact.
        * * Depending on the state of the reasoning (materialisation) and its * {@link KnowledgeBase}, the answers can have a different {@link Correctness} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index 72e7d654a..dbedc4f97 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -39,12 +39,20 @@ final public class LocalPrefixDeclarationRegistry extends AbstractPrefixDeclarat */ private String fallbackIri; + /** + * Construct a Prefix declaration registry without an inherited base IRI. In + * this case, we default to {@value PrefixDeclarationRegistry#EMPTY_BASE}. + */ public LocalPrefixDeclarationRegistry() { this(PrefixDeclarationRegistry.EMPTY_BASE); // empty string encodes: "no base" (use relative IRIs) } /** + * Construct a Prefix declaration registry with a base IRI inherited from the + * importing file. * + * @param fallbackIri the IRI to use as a base if none is set by the imported + * file itself (i.e., if {@link #setBaseIri} is not called). */ public LocalPrefixDeclarationRegistry(String fallbackIri) { super(); From 79e430e200a869342356c7f614339c4115f04073 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 16:35:12 +0100 Subject: [PATCH 0571/1003] README: Fix reference to maven central --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 14a23a5e3..f950b8107 100644 --- a/README.md +++ b/README.md @@ -9,12 +9,12 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of Rulewerk is version 0.5.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of Rulewerk is version 0.5.0 and was released as *vlog4j* (all future releases will be published as *rulewerk*). The easiest way of using the library is with Maven. 
Maven users must add the following dependency to the dependencies in their pom.xml file: ``` - org.semanticweb.rulewerk - rulewerk-core + org.semanticweb.vlog4j + vlog4j-core 0.5.0 ``` From 8a4b1f79ae0de3ed9b24b78eae352e3122e6cd71 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 16:52:02 +0100 Subject: [PATCH 0572/1003] Core: Change back to vlog-base --- rulewerk-core/pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 991c14904..35b7e5ea4 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -24,7 +24,7 @@ ${project.groupId} - rulewerk-base + vlog-base ${karmaresearch.vlog.version} @@ -50,7 +50,7 @@ ${project.groupId} - rulewerk-base + vlog-base ${karmaresearch.vlog.version} jar ./lib/jvlog-local.jar From 367cf52f35b0ac9b1c0b69ec0e3bc12cb52a1ce1 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 17:36:17 +0100 Subject: [PATCH 0573/1003] handle named nulls in vlog conversion --- .../reasoner/implementation/ModelToVLogConverter.java | 10 +++++++++- .../reasoner/implementation/TermToVLogConverter.java | 10 ++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java index c146179c9..57db01a7d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java @@ -27,6 +27,7 @@ import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; @@ -80,7 +81,14 @@ static String[] toVLogFactTuple(final Fact fact) { int i = 0; for (final Term term : terms) { // No checks for type of term -- only constants allowed in facts! - vLogFactTuple[i] = TermToVLogConverter.getVLogNameForConstant((Constant)term); + if (term instanceof Constant) { + vLogFactTuple[i] = TermToVLogConverter.getVLogNameForConstant((Constant) term); + } else if (term instanceof NamedNull) { + vLogFactTuple[i] = TermToVLogConverter.getVLogNameForNamedNull((NamedNull) term); + } else { + throw new RuntimeException("Terms in facts must be constants of named nulls. Encountered " + term + + " of type " + term.getType() + "."); + } i++; } return vLogFactTuple; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index 5fe7d568d..340e5ebd3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -85,6 +85,16 @@ public static String getVLogNameForConstant(Constant constant) { return constant.getName(); } } + + /** + * Converts the given named null to the name of a constant in VLog. 
+ * + * @param named nul + * @return VLog constant string + */ + public static String getVLogNameForNamedNull(NamedNull namedNull) { + return "skolem__" + namedNull.getName(); + } /** * Converts the string representation of a constant in Rulewerk directly to the From d46c5497c295b2b317399589667e49553507200c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 17:37:23 +0100 Subject: [PATCH 0574/1003] typo --- .../core/reasoner/implementation/TermToVLogConverter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index 340e5ebd3..5b50cb606 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -89,7 +89,7 @@ public static String getVLogNameForConstant(Constant constant) { /** * Converts the given named null to the name of a constant in VLog. * - * @param named nul + * @param named null * @return VLog constant string */ public static String getVLogNameForNamedNull(NamedNull namedNull) { From 665f0a194be5f6f88f241f73bd7866d25294f68f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 17:44:39 +0100 Subject: [PATCH 0575/1003] Clarified documentation --- .../core/reasoner/implementation/TermToVLogConverter.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index 5b50cb606..c143490c5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -85,9 +85,9 @@ public static String getVLogNameForConstant(Constant constant) { return constant.getName(); } } - + /** - * Converts the given named null to the name of a constant in VLog. + * Converts the given named null to the name of a (skolem) constant in VLog. * * @param named null * @return VLog constant string @@ -132,8 +132,8 @@ public karmaresearch.vlog.Term visit(ExistentialVariable term) { } /** - * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same name - * and type {@link karmaresearch.vlog.Term.TermType#BLANK}. + * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same + * name and type {@link karmaresearch.vlog.Term.TermType#BLANK}. 
*/ @Override public karmaresearch.vlog.Term visit(NamedNull term) { From 73a5ab61149836ad0ae9983571da8159de564235 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 17:46:51 +0100 Subject: [PATCH 0576/1003] Test the skolemisation of blanks in facts --- .../implementation/ModelToVLogConverterTest.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java index 8ff7491e2..ee030eac3 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -117,6 +117,15 @@ public void testToVLogTermBlank() { assertEquals(expectedVLogTerm, vLogTerm); } + @Test + public void testToVLogTermBlankSkolemization() { + final NamedNull blank = new NamedNullImpl("blank"); + + final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); + + assertEquals("skolem__blank", vLogSkolemConstant); + } + @Test public void testToVLogTermArray() { final Variable vx = Expressions.makeUniversalVariable("x"); From 2734cd079cc575a8f9e4116bb3a679960b6cc785 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 17:49:03 +0100 Subject: [PATCH 0577/1003] added fixme on potential issues with this solution --- .../core/reasoner/implementation/TermToVLogConverter.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index c143490c5..eb1b6255f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -89,6 +89,9 @@ public static String getVLogNameForConstant(Constant constant) { /** * Converts the given named null to the name of a (skolem) constant in VLog. * + * @fixme This skolemisation approach might lead to constants that clash with + * existing constant names. 
+ * * @param named null * @return VLog constant string */ From 329b249f9a5dc0fb50aeff3bf635ce8c59a17a5f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 21:59:11 +0100 Subject: [PATCH 0578/1003] Turn unlikely exception into RuntimeEx --- .../reasoner/implementation/Skolemization.java | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index aaa9d1f2f..d11d26849 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.util.UUID; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; @@ -47,15 +48,17 @@ public class Skolemization { * a {@link RenamedNamedNull} instance with the same name when * called on the same instance. * - * @throws IOException when ByteArrayOutputStream throws. * @return a {@link RenamedNamedNull} instance with a new name * that is specific to this instance and {@code name}. */ - public RenamedNamedNull skolemizeNamedNull(String name) throws IOException { + public RenamedNamedNull skolemizeNamedNull(String name) { ByteArrayOutputStream stream = new ByteArrayOutputStream(); - stream.write(namedNullNamespace); - stream.write(name.getBytes()); - - return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); + try { + stream.write(namedNullNamespace); + stream.write(name.getBytes()); + return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); + } catch (IOException e) { + throw new RulewerkRuntimeException(e.getMessage(), e); + } } } From f50620ada099da6bde48094c45f1118adcc066dc Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 21:59:38 +0100 Subject: [PATCH 0579/1003] Throw Rulewerk exception --- .../core/reasoner/implementation/ModelToVLogConverter.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java index 57db01a7d..ae7f59597 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java @@ -23,6 +23,7 @@ import java.util.Collection; import java.util.List; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Conjunction; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -86,8 +87,8 @@ static String[] toVLogFactTuple(final Fact fact) { } else if (term instanceof NamedNull) { vLogFactTuple[i] = TermToVLogConverter.getVLogNameForNamedNull((NamedNull) term); } else { - throw new RuntimeException("Terms in facts must be constants of named nulls. 
Encountered " + term - + " of type " + term.getType() + "."); + throw new RulewerkRuntimeException("Terms in facts must be constants or named nulls. Encountered " + + term + " of type " + term.getType() + "."); } i++; } From bd76d8032ba7e2e4aa171ec8f588be642b989520 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:00:07 +0100 Subject: [PATCH 0580/1003] Use Skolemization; support RenamedNamedNulls --- .../implementation/TermToVLogConverter.java | 12 ++++++++---- .../implementation/ModelToVLogConverterTest.java | 16 +++++++++++++++- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index eb1b6255f..64bc83db3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -29,6 +29,7 @@ import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; import org.semanticweb.rulewerk.core.model.api.TermVisitor; import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; /** * A visitor that converts {@link Term}s of different types to corresponding @@ -39,6 +40,8 @@ */ class TermToVLogConverter implements TermVisitor { + static final Skolemization skolemization = new Skolemization(); + /** * Transforms an abstract constant to a {@link karmaresearch.vlog.Term} with the * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. @@ -89,14 +92,15 @@ public static String getVLogNameForConstant(Constant constant) { /** * Converts the given named null to the name of a (skolem) constant in VLog. * - * @fixme This skolemisation approach might lead to constants that clash with - * existing constant names. 
- * * @param named null * @return VLog constant string */ public static String getVLogNameForNamedNull(NamedNull namedNull) { - return "skolem__" + namedNull.getName(); + if (namedNull instanceof RenamedNamedNull) { + return namedNull.getName(); + } else { + return skolemization.skolemizeNamedNull(namedNull.getName()).getName(); + } } /** diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java index ee030eac3..9bdfdb02e 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -22,12 +22,14 @@ import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.UUID; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.NamedNull; @@ -39,6 +41,7 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; @@ -123,7 +126,18 @@ public void testToVLogTermBlankSkolemization() { final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); - assertEquals("skolem__blank", vLogSkolemConstant); + assertNotEquals("blank", vLogSkolemConstant); + assertEquals(36,vLogSkolemConstant.length()); // length of a UUID + } + + @Test + public void testToVLogTermBlankRenamedSkolemization() { + final UUID uuid = UUID.randomUUID(); + final NamedNull blank = new RenamedNamedNull(uuid); + + final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); + + assertEquals(uuid.toString(), vLogSkolemConstant); } @Test From 1b5d064a2caa9313ba63dc499bb9782dcb7d1cb2 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:02:26 +0100 Subject: [PATCH 0581/1003] Remove unused exception handling --- .../rulewerk/parser/javacc/JavaCCParserBase.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index f61a80218..2ec72e8de 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -162,11 +162,7 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } NamedNull createNamedNull(String lexicalForm) throws ParseException { - try { - return this.skolemization.skolemizeNamedNull(lexicalForm); - } catch (IOException e) { - throw makeParseExceptionWithCause("Failed to generate a unique name for named null", e); - } + return this.skolemization.skolemizeNamedNull(lexicalForm); } void addStatement(Statement statement) { From 
05099f43f732caa5791771da942cf98ac135cc22 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:02:51 +0100 Subject: [PATCH 0582/1003] -unused import --- .../org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java | 1 - 1 file changed, 1 deletion(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 2ec72e8de..3e38aefde 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -20,7 +20,6 @@ * #L% */ -import java.io.IOException; import java.util.HashSet; import java.util.List; From 9cde81bff290ba1ec922db5d6e7517345a09441e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:25:06 +0100 Subject: [PATCH 0583/1003] Also use Skolemization for bnodes in OWL --- .../owlapi/ClassToRuleBodyConverter.java | 2 +- .../owlapi/ClassToRuleHeadConverter.java | 2 +- .../owlapi/OwlAxiomToRulesConverter.java | 25 ++++++++++++++----- .../owlapi/OwlToRulesConversionHelper.java | 6 ++--- .../rulewerk/owlapi/OwlToRulesConverter.java | 4 +-- 5 files changed, 26 insertions(+), 13 deletions(-) diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java index 11818553f..12ab8434c 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java @@ -117,7 +117,7 @@ public void visit(final OWLObjectAllValuesFrom ce) { @Override public void visit(final OWLObjectHasValue ce) { - final Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller()); + final Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller(), parent.skolemization); OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, term, this.body); } diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java index 2857286c6..6e87333d6 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java @@ -112,7 +112,7 @@ public void visit(final OWLObjectAllValuesFrom ce) { @Override public void visit(final OWLObjectHasValue ce) { - final Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller()); + final Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller(), parent.skolemization); OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, term, this.head); } diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java index 2f10b7099..80390026e 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -82,6 +82,7 @@ import 
org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; /** * Class for converting OWL axioms to rules. @@ -91,6 +92,8 @@ */ public class OwlAxiomToRulesConverter implements OWLAxiomVisitor { + Skolemization skolemization = new Skolemization(); + static OWLDataFactory owlDataFactory = OWLManager.getOWLDataFactory(); final Set rules = new HashSet<>(); @@ -98,6 +101,15 @@ public class OwlAxiomToRulesConverter implements OWLAxiomVisitor { final Variable frontierVariable = new UniversalVariableImpl("X"); int freshVariableCounter = 0; + /** + * Changes the renaming function for blank node IDs. Blank nodes with the same + * local ID will be represented differently before and after this function is + * called, but will retain a constant interpretation otherwise. + */ + public void startNewBlankNodeContext() { + skolemization = new Skolemization(); + } + /** * Returns a fresh universal variable, which can be used as auxiliary variable * in the current axiom's translation. @@ -166,7 +178,8 @@ Term replaceTerm(Term term, Term oldTerm, Term newTerm) { PositiveLiteralImpl makeTermReplacedLiteral(Literal literal, Term oldTerm, Term newTerm) { if (literal.isNegated()) { - throw new OwlFeatureNotSupportedException("Nonmonotonic negation of literals is not handled in OWL conversion."); + throw new OwlFeatureNotSupportedException( + "Nonmonotonic negation of literals is not handled in OWL conversion."); } return new PositiveLiteralImpl(literal.getPredicate(), literal.getTerms().map(term -> replaceTerm(term, oldTerm, newTerm)).collect(Collectors.toList())); @@ -242,8 +255,8 @@ public void visit(final OWLSubClassOfAxiom axiom) { @Override public void visit(final OWLNegativeObjectPropertyAssertionAxiom axiom) { - final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject()); - final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject()); + final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject(), skolemization); + final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject(), skolemization); final Literal atom = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), subject, object); final PositiveLiteral bot = OwlToRulesConversionHelper.getBottom(subject); this.rules.add(Expressions.makeRule(bot, atom)); @@ -346,8 +359,8 @@ public void visit(final OWLObjectPropertyRangeAxiom axiom) { @Override public void visit(final OWLObjectPropertyAssertionAxiom axiom) { - final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject()); - final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject()); + final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject(), skolemization); + final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject(), skolemization); this.facts.add(OwlToRulesConversionHelper.getObjectPropertyFact(axiom.getProperty(), subject, object)); } @@ -409,7 +422,7 @@ public void visit(final OWLClassAssertionAxiom axiom) { void visitClassAssertionAxiom(final OWLIndividual individual, final OWLClassExpression classExpression) { this.startAxiomConversion(); - final Term term = OwlToRulesConversionHelper.getIndividualTerm(individual); + final Term term = 
OwlToRulesConversionHelper.getIndividualTerm(individual, skolemization); final ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(term, this); classExpression.accept(headConverter); this.addRule(headConverter); diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index 43c4c1acb..802161334 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -40,9 +40,9 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.FactImpl; -import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; import org.semanticweb.rulewerk.owlapi.AbstractClassToRuleConverter.SimpleConjunction; /** @@ -60,11 +60,11 @@ public class OwlToRulesConversionHelper { * @param owlIndividual the individual to get a term for * @return a suitable term */ - public static Term getIndividualTerm(final OWLIndividual owlIndividual) { + public static Term getIndividualTerm(final OWLIndividual owlIndividual, Skolemization skolemization) { if (owlIndividual instanceof OWLNamedIndividual) { return new AbstractConstantImpl(((OWLNamedIndividual) owlIndividual).getIRI().toString()); } else if (owlIndividual instanceof OWLAnonymousIndividual) { - return new NamedNullImpl(((OWLAnonymousIndividual) owlIndividual).getID().toString()); + return skolemization.skolemizeNamedNull(((OWLAnonymousIndividual) owlIndividual).getID().toString()); } else { throw new OwlFeatureNotSupportedException( "Could not convert OWL individual '" + owlIndividual.toString() + "' to a term."); diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java index 4273bae87..f13f724fe 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java @@ -40,10 +40,10 @@ public class OwlToRulesConverter { * Converts the given OWL ontology to rules and facts, and adds the result to * the internal buffer of rules and facts for later retrieval. 
* - * @param owlOntology - * the ontology + * @param owlOntology the ontology */ public void addOntology(final OWLOntology owlOntology) { + this.owlAxiomToRulesConverter.startNewBlankNodeContext(); owlOntology.axioms().forEach(owlAxiom -> owlAxiom.accept(this.owlAxiomToRulesConverter)); } From 14e29a3625d507a8e290fa9259acaf2a4ba77b4d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:27:28 +0100 Subject: [PATCH 0584/1003] Remove unused imports --- .../implementation/VLogReasonerWriteInferencesTest.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java index 24e747cdb..99c6a68f4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -9,14 +9,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.Map.Entry; import java.util.stream.Collectors; import java.util.stream.Stream; import org.junit.Before; import org.junit.Test; -import org.mockito.internal.util.collections.Sets; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.Conjunction; From 7915c71ae2329359fdd1caa4f7d49d4d25ba0327 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:51:30 +0100 Subject: [PATCH 0585/1003] Test new cases --- .../ModelToVLogConverterTest.java | 42 ++++++++++++++++++- 1 file changed, 40 insertions(+), 2 deletions(-) diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java index 9bdfdb02e..b88e5e3ef 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -33,14 +33,17 @@ import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; @@ -127,9 +130,9 @@ public void testToVLogTermBlankSkolemization() { final String vLogSkolemConstant = 
TermToVLogConverter.getVLogNameForNamedNull(blank); assertNotEquals("blank", vLogSkolemConstant); - assertEquals(36,vLogSkolemConstant.length()); // length of a UUID + assertEquals(36, vLogSkolemConstant.length()); // length of a UUID } - + @Test public void testToVLogTermBlankRenamedSkolemization() { final UUID uuid = UUID.randomUUID(); @@ -187,6 +190,41 @@ public void testToVLogFactTuples() { assertArrayEquals(expectedTuples, vLogTuples); } + @Test + public void testToVLogFactTupleNulls() { + final UUID uuid = UUID.randomUUID(); + final NamedNull n = new RenamedNamedNull(uuid); + final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(n)); + + final String[] expectedTuple = { uuid.toString() }; + + final String[] actualTuple = ModelToVLogConverter.toVLogFactTuple(atom1); + + assertArrayEquals(expectedTuple, actualTuple); + } + + @Test(expected = RulewerkRuntimeException.class) + public void testToVLogFactTupleUnsupported() { + // We need a fact that accepts exception-causing terms in the first place: + class NonValidatingFact extends PositiveLiteralImpl implements Fact { + + public NonValidatingFact(Predicate predicate, List terms) { + super(predicate, terms); + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + } + + final Variable x = Expressions.makeUniversalVariable("X"); + final Fact atom1 = new NonValidatingFact(Expressions.makePredicate("p1", 1), Arrays.asList(x)); + + ModelToVLogConverter.toVLogFactTuple(atom1); + } + @Test public void testToVLogPredicate() { final Predicate predicate = Expressions.makePredicate("pred", 1); From 3a115debdce2d29fd14b9584267001621897c082 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 23:23:01 +0100 Subject: [PATCH 0586/1003] Test bnode diversification across ontologies --- .../owlapi/OwlAxiomToRulesConverterTest.java | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java index d561b779c..1024bcab3 100644 --- a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java +++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java @@ -634,16 +634,16 @@ public void testObjectPropertyRange() { assertEquals(Collections.singleton(rule), converter.rules); } - + /* * A \sqsubseteq <1 .R */ @Test(expected = OwlFeatureNotSupportedException.class) public void testSubClassOfMaxCardinality() { - + OWLClassExpression maxCard = df.getOWLObjectMaxCardinality(1, pR); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, maxCard ); - + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, maxCard); + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); } @@ -695,7 +695,7 @@ public void testNominalsInConjunctionLeftSubClassOfClass() { final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); } - + /* * (B \sqcap {a,b}) \sqsubseteq A */ @@ -710,9 +710,8 @@ public void testNominalsInConjunctionRightSubClassOfClass() { axiom.accept(converter); } - /* - * A \sqsubseteq (B \sqcap {a,b}) + * A \sqsubseteq (B \sqcap {a,b}) */ @Test(expected = OwlFeatureNotSupportedException.class) public void testClassSubClassOfNominalsInConjunctionRight() { @@ -724,7 +723,6 @@ public void 
testClassSubClassOfNominalsInConjunctionRight() { axiom.accept(converter); } - /* * A \sqsubseteq {a} */ From 37c4dfbc36f10d260232d60e1bc6f98a22ef10b8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 23:23:19 +0100 Subject: [PATCH 0587/1003] Test bnode diversification across ontologies --- .../owlapi/OwlToRulesConverterTest.java | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java diff --git a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java new file mode 100644 index 000000000..88281145e --- /dev/null +++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java @@ -0,0 +1,48 @@ +package org.semanticweb.rulewerk.owlapi; + +import static org.junit.Assert.*; + +import java.util.Arrays; + +import org.junit.Test; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnonymousIndividual; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLIndividual; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; + +public class OwlToRulesConverterTest { + + static OWLDataFactory df = OWLManager.getOWLDataFactory(); + + public static IRI getIri(final String localName) { + return IRI.create("http://example.org/" + localName); + } + + public static OWLClass getOwlClass(final String localName) { + return df.getOWLClass(getIri(localName)); + } + + static final OWLClass cC = getOwlClass("C"); + static final OWLIndividual inda = df.getOWLNamedIndividual(getIri("a")); + + @Test + public void testLoadOntologies() throws OWLOntologyCreationException { + final OWLAnonymousIndividual bnode = df.getOWLAnonymousIndividual("abc"); + final OWLAxiom Cn = df.getOWLClassAssertionAxiom(cC, bnode); + final OWLAxiom Ca = df.getOWLClassAssertionAxiom(cC, inda); + + final OWLOntology ontology = OWLManager.createOWLOntologyManager().createOntology(Arrays.asList(Cn,Ca)); + + final OwlToRulesConverter converter = new OwlToRulesConverter(); + converter.addOntology(ontology); + converter.addOntology(ontology); + + assertEquals(3, converter.getFacts().size()); + } + +} From 83dde1b2eb913aec6f66d01a533ed100fc851b06 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 3 Mar 2020 17:44:22 +0100 Subject: [PATCH 0588/1003] Core: Fix merging of prefix declarations with an empty base --- .../AbstractPrefixDeclarationRegistry.java | 10 ++++++--- .../MergingPrefixDeclarationRegistry.java | 19 ++++++++++++---- .../MergingPrefixDeclarationRegistryTest.java | 22 +++++++++++++++++++ .../owlapi/OwlToRulesConverterTest.java | 20 +++++++++++++++++ .../LocalPrefixDeclarationRegistry.java | 1 - 5 files changed, 64 insertions(+), 8 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 3e9127f74..48afd6a1e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -44,10 +44,14 @@ public abstract class AbstractPrefixDeclarationRegistry implements PrefixDeclara /** * Iri holding the base namespace. */ - protected String baseUri = PrefixDeclarationRegistry.EMPTY_BASE; + protected String baseUri = null; @Override public String getBaseIri() { + if (baseUri == null) { + baseUri = PrefixDeclarationRegistry.EMPTY_BASE; + } + return baseUri; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index 8b6ebe16b..94570bd0a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -65,11 +65,21 @@ public MergingPrefixDeclarationRegistry(final PrefixDeclarationRegistry prefixDe */ @Override public void setBaseIri(String baseIri) { - if (baseIri != this.baseUri && this.baseUri != PrefixDeclarationRegistry.EMPTY_BASE) { - prefixes.put(getFreshPrefix(), this.baseUri); + if (baseIri == this.baseUri) { + return; } - this.baseUri = baseIri; + if (this.baseUri == null) { + this.baseUri = baseIri; + } else if (this.baseUri == PrefixDeclarationRegistry.EMPTY_BASE) { + // we need to keep the empty base, so that we don't + // accidentally relativise absolute Iris to + // baseIri. Hence, introduce baseIri as a fresh prefix. 
+ prefixes.put(getFreshPrefix(), baseIri); + } else { + prefixes.put(getFreshPrefix(), this.baseUri); + this.baseUri = baseIri; + } } /** @@ -96,8 +106,9 @@ public void setPrefixIri(String prefixName, String prefixIri) { */ public String unresolveAbsoluteIri(String iri) { Map matches = new HashMap<>(); + String baseIri = getBaseIri(); - if (baseUri != PrefixDeclarationRegistry.EMPTY_BASE && iri.startsWith(baseUri) && !iri.equals(baseUri)) { + if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.startsWith(baseIri) && !iri.equals(baseIri)) { matches.put(iri.replaceFirst(baseUri, PrefixDeclarationRegistry.EMPTY_BASE), baseUri.length()); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 131d5a712..9f06ee6d2 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -21,6 +21,7 @@ */ import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import org.junit.Before; import org.junit.Test; @@ -185,4 +186,25 @@ public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws Pref String resolved = prefixDeclarations.resolvePrefixedName(unresolved); assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved)); } + + @Test + public void unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative() throws PrefixDeclarationException { + String relativeIri = this.prefixDeclarations.absolutizeIri(RELATIVE); + PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setBaseIri(BASE); + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + assertEquals(relativeIri, this.prefixDeclarations.unresolveAbsoluteIri(relativeIri)); + } + + @Test + public void unresolveAbsoluteIri_absoluteIriMergedOntoEmptyBase_staysAbsolute() throws PrefixDeclarationException { + assertEquals("", this.prefixDeclarations.getBaseIri()); + PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setBaseIri(BASE); + String absoluteIri = prefixDeclarations.absolutizeIri(RELATIVE); + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + String resolvedIri = this.prefixDeclarations.unresolveAbsoluteIri(absoluteIri); + assertNotEquals(RELATIVE, resolvedIri); + assertEquals("rulewerk_generated_0:" + RELATIVE, resolvedIri); + } } diff --git a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java index 88281145e..7b6fd9533 100644 --- a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java +++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.owlapi; +/*- + * #%L + * Rulewerk OWL API Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.*; import java.util.Arrays; diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index dbedc4f97..a72def47d 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -57,7 +57,6 @@ public LocalPrefixDeclarationRegistry() { public LocalPrefixDeclarationRegistry(String fallbackIri) { super(); this.fallbackIri = fallbackIri; - this.baseUri = null; } /** From 6015e752bb5eef4c9473d9c5345f4605da9ce088 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 18:24:39 +0100 Subject: [PATCH 0589/1003] Core: Make FileDataSource serialization OS-independent Fixes #140. --- RELEASE-NOTES.md | 2 + .../core/model/implementation/Serializer.java | 2 +- .../implementation/CsvFileDataSource.java | 11 ++--- .../implementation/FileDataSource.java | 46 +++++++++++++------ .../implementation/RdfFileDataSource.java | 8 ++-- .../core/model/DataSourceDeclarationTest.java | 21 ++++----- .../implementation/AddDataSourceTest.java | 24 +++++----- .../implementation/CsvFileDataSourceTest.java | 26 +++++------ .../FileDataSourceTestUtils.java | 8 ++-- .../implementation/RdfFileDataSourceTest.java | 14 +++--- .../VLogReasonerCombinedInputs.java | 12 ++--- .../implementation/VLogReasonerCsvInput.java | 22 ++++----- .../implementation/VLogReasonerRdfInput.java | 18 ++++---- .../implementation/VLogReasonerStateTest.java | 7 ++- .../examples/graal/DoidExampleGraal.java | 7 ++- .../CsvFileDataSourceDeclarationHandler.java | 11 ++--- .../RdfFileDataSourceDeclarationHandler.java | 11 ++--- .../parser/RuleParserDataSourceTest.java | 15 +++--- 18 files changed, 134 insertions(+), 131 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 5d2119244..159b43158 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -9,6 +9,8 @@ Breaking changes: * In the example package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no longer exist. It can be replaced by `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` +* The `FileDataSource` constructor and those of derived classes now + take the path to a file instead of `File` object. 
New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 0a2668cd9..d83045815 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -380,7 +380,7 @@ public static String getString(final SparqlQueryResultDataSource dataSource) { } private static String getFileString(final FileDataSource fileDataSource) { - return getString(fileDataSource.getFile().toString()); + return getString(fileDataSource.getPath().toString()); } private static String getIRIString(final String string) { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index 83aff537a..7b7812b4c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.Arrays; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** @@ -57,15 +56,15 @@ public class CsvFileDataSource extends FileDataSource { /** * Constructor. * - * @param csvFile a file of a {@code .csv} or {@code .csv.gz} extension and a - * valid CSV format. + * @param csvFile path to a file of a {@code .csv} or {@code .csv.gz} extension + * and a valid CSV format. * @throws IOException if the path of the given {@code csvFile} is * invalid. * @throws IllegalArgumentException if the extension of the given * {@code csvFile} does not occur in * {@link #possibleExtensions}. */ - public CsvFileDataSource(@NonNull final File csvFile) throws IOException { + public CsvFileDataSource(final String csvFile) throws IOException { super(csvFile, possibleExtensions); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index 4d79ae3a2..d65bc7af1 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,7 +28,6 @@ import java.util.stream.StreamSupport; import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; /** * A {@code FileDataSource} is an abstract implementation of a storage for fact @@ -44,6 +43,8 @@ public abstract class FileDataSource extends VLogDataSource { private final static String DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY"; private final File file; + private final String filePath; + private final String fileName; private final String extension; /** * The canonical path to the parent directory where the file resides. @@ -54,7 +55,8 @@ public abstract class FileDataSource extends VLogDataSource { /** * Constructor. * - * @param file a file that will serve as storage for fact terms. + * @param filePath path to a file that will serve as storage for fact + * terms. * @param possibleExtensions a list of extensions that the files could have * @throws IOException if the path of the given {@code file} is * invalid. @@ -62,24 +64,24 @@ public abstract class FileDataSource extends VLogDataSource { * does not occur in * {@code possibleExtensions}. */ - public FileDataSource(@NonNull final File file, final Iterable possibleExtensions) throws IOException { - Validate.notNull(file, "Data source file cannot be null!"); - final String fileName = file.getName(); - - this.file = file; + public FileDataSource(final String filePath, final Iterable possibleExtensions) throws IOException { + Validate.notBlank(filePath, "Data source file name cannot be null!"); + + this.file = new File(filePath); + this.filePath = filePath.replaceAll("\\\\", "/"); // canonicalise windows-style path separators + this.fileName = this.filePath.substring(this.filePath.lastIndexOf("/") + 1); // just the file name + this.extension = getValidExtension(this.fileName, possibleExtensions); + this.fileNameWithoutExtension = this.fileName.substring(0, this.fileName.lastIndexOf(this.extension)); this.dirCanonicalPath = Paths.get(file.getCanonicalPath()).getParent().toString(); - this.extension = getValidExtension(file, possibleExtensions); - this.fileNameWithoutExtension = fileName.substring(0, fileName.lastIndexOf(this.extension)); } - private String getValidExtension(final File file, final Iterable possibleExtensions) { - final String fileName = file.getName(); + private String getValidExtension(final String fileName, final Iterable possibleExtensions) { final Stream extensionsStream = StreamSupport.stream(possibleExtensions.spliterator(), true); - final Optional potentialExtension = extensionsStream.filter(ex -> fileName.endsWith(ex)).findFirst(); + final Optional potentialExtension = extensionsStream.filter(fileName::endsWith).findFirst(); if (!potentialExtension.isPresent()) { throw new IllegalArgumentException("Expected one of the following extensions for the data source file " - + file + ": " + String.join(", ", possibleExtensions) + "."); + + fileName + ": " + String.join(", ", possibleExtensions) + "."); } return potentialExtension.get(); @@ -104,7 +106,16 @@ public File getFile() { return this.file; } + public String getPath() { + return this.filePath; + } + + public String getName() { + return this.fileName; + } + /** + * Canonicalise the file path * * @return The 
canonical path to the parent directory where the file resides. */ @@ -112,6 +123,11 @@ String getDirCanonicalPath() { return this.dirCanonicalPath; } + /** + * Get the base name of the file, without an extension. + * + * @return the file basename without any extension. + */ String getFileNameWithoutExtension() { return this.fileNameWithoutExtension; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index c1274aac2..e56148544 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -55,7 +55,7 @@ public class RdfFileDataSource extends FileDataSource { /** * Constructor. * - * @param rdfFile a file of a {@code .nt} or {@code .nt.gz} extension and a + * @param rdfFile path to a file of a {@code .nt} or {@code .nt.gz} extension and a * valid N-Triples format. * @throws IOException if the path of the given {@code rdfFile} is * invalid. @@ -63,7 +63,7 @@ public class RdfFileDataSource extends FileDataSource { * {@code rdfFile} does not occur in * {@link #possibleExtensions}. */ - public RdfFileDataSource(final File rdfFile) throws IOException { + public RdfFileDataSource(final String rdfFile) throws IOException { super(rdfFile, possibleExtensions); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 32ef82333..7c5ad3cba 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
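The OS-independence fix above boils down to plain string handling: the constructor keeps the given path, rewrites Windows-style separators to forward slashes, and derives the file name, base name, and extension from the normalised string, so serialized source declarations look the same on every platform. A minimal standalone sketch of just that string logic (the class name and sample path are hypothetical; the real class additionally computes the canonical parent directory via java.io.File):

```
import java.util.Arrays;
import java.util.List;

public class PathNormalisationSketch {

	public static void main(String[] args) {
		// hypothetical Windows-style input path and the extensions a CSV source accepts
		final String filePath = "D:\\input\\file.csv";
		final List<String> possibleExtensions = Arrays.asList(".csv", ".csv.gz");

		// canonicalise windows-style path separators
		final String normalisedPath = filePath.replaceAll("\\\\", "/");
		// keep just the file name after the last separator
		final String fileName = normalisedPath.substring(normalisedPath.lastIndexOf("/") + 1);
		// pick the matching extension; no match means the file type is not supported
		final String extension = possibleExtensions.stream()
				.filter(fileName::endsWith)
				.findFirst()
				.orElseThrow(IllegalArgumentException::new);
		final String baseName = fileName.substring(0, fileName.lastIndexOf(extension));

		System.out.println(normalisedPath); // prints: D:/input/file.csv
		System.out.println(fileName);       // prints: file.csv
		System.out.println(baseName);       // prints: file
	}
}
```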
@@ -28,7 +28,6 @@ import java.net.MalformedURLException; import java.net.URL; -import org.junit.Ignore; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; @@ -84,25 +83,23 @@ public void toString_SparqlQueryResultDataSource() throws IOException { @Test public void toString_CsvFileDataSource() throws IOException { final Predicate predicate2 = Expressions.makePredicate("q", 1); - final String relativeDirName = "dir"; + final String relativeDirName = "dir/"; final String fileName = "file.csv"; - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(relativeDirName, fileName)); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(relativeDirName + fileName); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); - final String expectedFilePath = Serializer.getString(relativeDirName + File.separator + fileName); + final String expectedFilePath = Serializer.getString(relativeDirName + fileName); assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } - // FIXME: have String representation of files OS independent - @Ignore @Test public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throws IOException { final Predicate predicate = Expressions.makePredicate("q", 1); final String absoluteFilePathWindows = "D:\\input\\file.csv"; - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(absoluteFilePathWindows)); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(absoluteFilePathWindows); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedCsvFileDataSource); assertEquals("@source q[1]: load-csv(\"D:/input/file.csv\") .", dataSourceDeclaration.toString()); @@ -111,14 +108,14 @@ public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throw @Test public void toString_RdfFileDataSource_relativePath() throws IOException { final Predicate predicate = Expressions.makePredicate("q", 1); - final String relativeDirName = "dir"; + final String relativeDirName = "dir/"; final String fileName = "file.nt"; - final File unzippedRdfFile = new File(relativeDirName, fileName); + final String unzippedRdfFile = relativeDirName + fileName; final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); - final String expectedFilePath = Serializer.getString(relativeDirName + File.separator + fileName); + final String expectedFilePath = Serializer.getString(relativeDirName + fileName); assertEquals("@source q[1]: load-rdf(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java index 87d84362f..ce8ae45ef 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java @@ -12,9 +12,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file 
except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -64,7 +64,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep final Fact factPredicatePArity2 = Expressions.makeFact("p", Arrays.asList(constantA, constantA)); final Fact factPredicateQArity1 = Expressions.makeFact("q", Arrays.asList(constantA)); final Predicate predicateLArity1 = Expressions.makePredicate("l", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(factPredicatePArity2); @@ -93,7 +93,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep public void testAddDataSourceBeforeLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final KnowledgeBase kb = new KnowledgeBase(); @@ -119,7 +119,7 @@ public void testAddDataSourceBeforeLoading() throws IOException { public void testAddDataSourceAfterLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final KnowledgeBase kb = new KnowledgeBase(); @@ -150,7 +150,7 @@ public void testAddDataSourceAfterLoading() throws IOException { public void testAddDataSourceAfterReasoning() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final KnowledgeBase kb = new KnowledgeBase(); @@ -181,8 +181,8 @@ public void testAddDataSourceAfterReasoning() throws IOException { @Test public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource1 = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); - final DataSource dataSource2 = new CsvFileDataSource(new File(CSV_FILE_c_d_PATH)); + final DataSource dataSource1 = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); + final DataSource dataSource2 = new CsvFileDataSource(CSV_FILE_c_d_PATH); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1)); @@ -202,7 +202,7 @@ public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws IOExcept @Test public void testAddDataSourceNoFactsForPredicate() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final Fact fact = 
Expressions.makeFact(Expressions.makePredicate("p", 1), Arrays.asList(Expressions.makeAbstractConstant("a"))); @@ -222,9 +222,9 @@ public void testAddDataSourceNoFactsForPredicate() throws IOException { @Test public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource1 = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource1 = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final DataSource dataSource2 = new CsvFileDataSource( - new File(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv")); + FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1)); @@ -247,7 +247,7 @@ public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOExce @Test public void testAddDataSourceAndFactsForPredicateAfterReasoning() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1), Arrays.asList(Expressions.makeAbstractConstant("a"))); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java index f9b840f1e..e94173d20 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
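The updated tests above illustrate the pattern client code follows once file sources are built from plain path strings. A short usage sketch, not taken from the repository: the predicate name and file path are made up, and the imports assume the package layout visible in the diffs:

```
import org.semanticweb.rulewerk.core.model.api.Predicate;
import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;

public class CsvSourceSketch {

	public static void main(String[] args) throws Exception {
		// unary predicate that will be populated from a CSV file given as a path string
		final Predicate predicate = Expressions.makePredicate("p", 1);
		final CsvFileDataSource dataSource = new CsvFileDataSource("src/test/data/input/example.csv");

		// register the source declaration in a knowledge base
		final KnowledgeBase kb = new KnowledgeBase();
		kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource));
	}
}
```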
@@ -41,25 +41,23 @@ public void testConstructorNullFile() throws IOException { @Test(expected = IllegalArgumentException.class) public void testConstructorFalseExtension() throws IOException { - new CsvFileDataSource(new File(ntFile)); + new CsvFileDataSource(ntFile); } @Test public void testConstructor() throws IOException { - final File unzippedCsvFile = new File(csvFile); - final File zippedCsvFile = new File(gzFile); final String dirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(unzippedCsvFile); - final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(zippedCsvFile); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); + final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); - FileDataSourceTestUtils.testConstructor(unzippedCsvFileDataSource, unzippedCsvFile, dirCanonicalPath, "file"); - FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, zippedCsvFile, dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(unzippedCsvFileDataSource, new File(csvFile).getName(), dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, new File(gzFile).getName(), dirCanonicalPath, "file"); } @Test public void testToConfigString() throws IOException { - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); - final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(new File(gzFile)); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); + final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0=" @@ -71,8 +69,7 @@ public void testToConfigString() throws IOException { @Test public void testNoParentDir() throws IOException { - final File file = new File("file.csv"); - final FileDataSource fileDataSource = new CsvFileDataSource(file); + final FileDataSource fileDataSource = new CsvFileDataSource("file.csv"); final String dirCanonicalPath = fileDataSource.getDirCanonicalPath(); final String currentFolder = new File(".").getCanonicalPath(); assertEquals(currentFolder, dirCanonicalPath); @@ -80,8 +77,7 @@ public void testNoParentDir() throws IOException { @Test public void testNotNormalisedParentDir() throws IOException { - final File file = new File("./././file.csv"); - final FileDataSource fileDataSource = new CsvFileDataSource(file); + final FileDataSource fileDataSource = new CsvFileDataSource("./././file.csv"); final String dirCanonicalPath = fileDataSource.getDirCanonicalPath(); final String currentFolder = new File(".").getCanonicalPath(); assertEquals(currentFolder, dirCanonicalPath); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java index 74a5215fe..ea714f865 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -14,9 +14,9 @@ * Licensed under the Apache License, 
Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -92,9 +92,9 @@ public static List> getCSVContent(final String csvFile) throws IOEx return content; } - public static void testConstructor(final FileDataSource fileDataSource, final File expectedFile, + public static void testConstructor(final FileDataSource fileDataSource, final String expectedFileName, final String expectedDirCanonicalPath, final String expectedFileNameWithoutExtension) throws IOException { - assertEquals(expectedFile, fileDataSource.getFile()); + assertEquals(expectedFileName, fileDataSource.getName()); assertEquals(expectedDirCanonicalPath, fileDataSource.getDirCanonicalPath()); assertEquals(expectedFileNameWithoutExtension, fileDataSource.getFileNameWithoutExtension()); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java index c5baf8bde..70e8b4657 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
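Extension checking is unchanged by the move to string paths: each concrete source still matches the file name against its allowed extensions and rejects anything else with an IllegalArgumentException, which is what the adjacent tests verify. A brief sketch of that behaviour (the paths are made up):

```
import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;
import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource;

public class ExtensionCheckSketch {

	public static void main(String[] args) throws Exception {
		// accepted: .csv and .csv.gz for CSV sources, .nt and .nt.gz for RDF sources
		new CsvFileDataSource("data/facts.csv.gz");
		new RdfFileDataSource("data/triples.nt");

		try {
			// an N-Triples file is not a valid CSV source
			new CsvFileDataSource("data/triples.nt");
		} catch (IllegalArgumentException e) {
			System.out.println("rejected: " + e.getMessage());
		}
	}
}
```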
@@ -30,8 +30,8 @@ public class RdfFileDataSourceTest { - private final File unzippedRdfFile = new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"); - private final File zippedRdfFile = new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.nt.gz"); + private final String unzippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"; + private final String zippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt.gz"; @Test(expected = NullPointerException.class) public void testConstructorNullFile() throws IOException { @@ -40,7 +40,7 @@ public void testConstructorNullFile() throws IOException { @Test(expected = IllegalArgumentException.class) public void testConstructorFalseExtension() throws IOException { - new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.csv")); + new RdfFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"); } @Test @@ -50,8 +50,8 @@ public void testConstructor() throws IOException { final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); final RdfFileDataSource zippedRdfFileDataSource = new RdfFileDataSource(zippedRdfFile); - FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, unzippedRdfFile, dirCanonicalPath, "file"); - FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, zippedRdfFile, dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, new File(unzippedRdfFile).getName(), dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, new File(zippedRdfFile).getName(), dirCanonicalPath, "file"); } @Test diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java index 6c305e40d..b3f2fba74 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -2,8 +2,6 @@ import static org.junit.Assert.assertEquals; -import java.io.File; - /*- * #%L * Rulewerk Core Components @@ -13,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
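Because the serializer now takes the file string from getPath() rather than from the underlying File object, a Windows-style input path is rendered with forward slashes in @source declarations, matching the expectations in DataSourceDeclarationTest above. A small sketch of the two accessors (the path is hypothetical):

```
import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;

public class PathAccessorSketch {

	public static void main(String[] args) throws Exception {
		final CsvFileDataSource source = new CsvFileDataSource("D:\\input\\file.csv");

		System.out.println(source.getPath()); // prints: D:/input/file.csv (separators canonicalised)
		System.out.println(source.getName()); // prints: file.csv
	}
}
```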
@@ -69,10 +67,10 @@ public class VLogReasonerCombinedInputs { final DataSourceDeclaration qCDFromCsv; public VLogReasonerCombinedInputs() throws IOException { - qFromCsv = new DataSourceDeclarationImpl(q, new CsvFileDataSource(new File( - FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv"))); + qFromCsv = new DataSourceDeclarationImpl(q, new CsvFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv")); qCDFromCsv = new DataSourceDeclarationImpl(q, - new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"))); + new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv")); } @Test diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java index bbdc37316..3ec10b94f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -61,17 +61,17 @@ public void testLoadEmptyCsvFile() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(unaryPredicate1, this.x); FileDataSourceTestUtils.testLoadEmptyFile(unaryPredicate1, queryAtom, - new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv"))); + new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv")); FileDataSourceTestUtils.testLoadEmptyFile(unaryPredicate1, queryAtom, - new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv.gz"))); + new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv.gz")); } @Test public void testLoadUnaryFactsFromCsvFile() throws IOException { - testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(new File( - FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv"))); - testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(new File( - FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + ".csv.gz"))); + testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv")); + testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + ".csv.gz")); } private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) throws IOException { @@ -107,7 +107,7 @@ private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource file public void testLoadNonexistingCsvFile() throws IOException { final File nonexistingFile = new File("nonexistingFile.csv"); assertFalse(nonexistingFile.exists()); - final FileDataSource fileDataSource = new 
CsvFileDataSource(nonexistingFile); + final FileDataSource fileDataSource = new CsvFileDataSource(nonexistingFile.getName()); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource)); @@ -118,8 +118,8 @@ public void testLoadNonexistingCsvFile() throws IOException { @Test(expected = IncompatiblePredicateArityException.class) public void testLoadCsvFileWrongArity() throws IOException { - final FileDataSource fileDataSource = new CsvFileDataSource(new File( - FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv")); + final FileDataSource fileDataSource = new CsvFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv"); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource)); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java index 0d76dc569..f9b52ad44 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -62,7 +62,7 @@ public class VLogReasonerRdfInput { @Test public void testLoadEmptyRdfFile() throws IOException { FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom, - new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt"))); + new RdfFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt")); } @Ignore @@ -71,19 +71,19 @@ public void testLoadEmptyRdfFile() throws IOException { @Test public void testLoadEmptyRdfFileGz() throws IOException { FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom, - new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt.gz"))); + new RdfFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt.gz")); } @Test public void testLoadTernaryFactsFromRdfFile() throws IOException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( - new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt"))); + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt")); } @Test public void testLoadTernaryFactsFromRdfFileGz() throws IOException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( - new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedNtFileRoot + ".nt.gz"))); + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedNtFileRoot + ".nt.gz")); } public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fileDataSource) throws IOException { @@ -104,7 +104,7 @@ public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fil public void testLoadNonexistingRdfFile() throws IOException { final File nonexistingFile = new File("nonexistingFile.nt"); assertFalse(nonexistingFile.exists()); - final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile); + final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile.getName()); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); @@ -115,8 +115,8 @@ public void testLoadNonexistingRdfFile() throws IOException { @Test public void testLoadRdfInvalidFormat() throws IOException { - final FileDataSource fileDataSource = new RdfFileDataSource(new File( - FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt")); + final FileDataSource fileDataSource = new RdfFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt"); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java index 8f85f806e..91c61c680 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -221,7 +220,7 @@ public void testResetKeepExplicitDatabase() throws IOException { // assert r(d) final Predicate predicateR1 = Expressions.makePredicate("r", 1); kb.addStatement(new DataSourceDeclarationImpl(predicateR1, - new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER, "constantD.csv")))); + new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "constantD.csv"))); // p(?x) -> q(?x) try (final VLogReasoner reasoner = new VLogReasoner(kb)) { diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java index 6c1e9f19d..971fedb8b 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -67,8 +67,7 @@ public static void main(final String[] args) throws IOException { /* Configure RDF data source */ final Predicate doidTriplePredicate = Expressions.makePredicate("doidTriple", 3); - final DataSource doidDataSource = new RdfFileDataSource( - new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); + final DataSource doidDataSource = new RdfFileDataSource(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz"); kb.addStatement(new DataSourceDeclarationImpl(doidTriplePredicate, doidDataSource)); /* Configure SPARQL data sources */ @@ -99,8 +98,8 @@ public static void main(final String[] args) throws IOException { while (parser.hasNext()) { final Object object = parser.next(); if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { - kb.addStatement( - GraalToRulewerkModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); + kb.addStatement(GraalToRulewerkModelConverter + .convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); } } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java index 338b024e8..7979f154f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,7 +20,6 @@ * #L% */ -import java.io.File; import java.io.IOException; import java.util.List; @@ -42,12 +41,12 @@ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclaratio public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); - File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "source file"); + String fileName = DirectiveHandler.validateStringArgument(arguments.get(0), "source file"); try { - return new CsvFileDataSource(file); + return new CsvFileDataSource(fileName); } catch (IOException e) { - throw new ParsingException("Could not use source file \"" + file.getName() + "\": " + e.getMessage(), e); + throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); } } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java index b27f52d21..ee7a2ec79 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,7 +20,6 @@ * #L% */ -import java.io.File; import java.io.IOException; import java.util.List; @@ -42,12 +41,12 @@ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclaratio public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); - File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "source file"); + String fileName = DirectiveHandler.validateStringArgument(arguments.get(0), "source file"); try { - return new RdfFileDataSource(file); + return new RdfFileDataSource(fileName); } catch (IOException e) { - throw new ParsingException("Could not use source file \"" + file.getName() + "\": " + e.getMessage(), e); + throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); } } } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index 21919dd91..14c2bceb3 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,7 +23,6 @@ import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; @@ -56,14 +55,14 @@ public class RuleParserDataSourceTest { @Test public void testCsvSource() throws ParsingException, IOException { String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; - CsvFileDataSource csvds = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); + CsvFileDataSource csvds = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @Test public void testRdfSource() throws ParsingException, IOException { String input = "@source p[3] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; - RdfFileDataSource rdfds = new RdfFileDataSource(new File(EXAMPLE_RDF_FILE_PATH)); + RdfFileDataSource rdfds = new RdfFileDataSource(EXAMPLE_RDF_FILE_PATH); assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @@ -168,7 +167,7 @@ public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingExcep public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); Predicate predicate1 = Expressions.makePredicate("p", 3); - RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File(EXAMPLE_RDF_FILE_PATH)); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(EXAMPLE_RDF_FILE_PATH); DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedRdfFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); @@ -179,7 +178,7 @@ public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingExceptio public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); Predicate predicate1 = Expressions.makePredicate("q", 1); - CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedCsvFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); @@ -201,7 +200,7 @@ public DataSource handleDirective(List arguments, SubParserFa throws ParsingException { CsvFileDataSource source; try { - source = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); + source = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); } catch (IOException e) { throw new ParsingException(e); } From 32c0795af142b68dfd9e59cbdcdb02cdc04e9a00 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 18:33:25 +0100 Subject: [PATCH 0590/1003] Drop dependency on eclipse.jdt.annotations Fixes #79. 
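Note on the change below: it drops the compile-time `@NonNull`/`@Nullable` annotations, while null handling continues to rely on the runtime checks from `org.apache.commons.lang3.Validate` that the constructors already perform. A minimal sketch of that pattern, using a hypothetical `LabelledEdge` class that is not part of the codebase:

```java
import java.util.List;

import org.apache.commons.lang3.Validate;

// Hypothetical class, for illustration only: fail-fast runtime checks
// take the place of the removed compile-time nullness annotations.
public class LabelledEdge {
	private final String label;
	private final List<String> nodes;

	public LabelledEdge(final String label, final List<String> nodes) {
		Validate.notBlank(label, "Edge labels cannot be blank.");
		Validate.noNullElements(nodes, "Null nodes cannot appear in edges.");
		this.label = label;
		this.nodes = nodes;
	}
}
```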
--- pom.xml | 6 ------ .../model/implementation/AbstractLiteralImpl.java | 7 +++---- .../model/implementation/NegativeLiteralImpl.java | 7 +++---- .../model/implementation/PositiveLiteralImpl.java | 7 +++---- .../core/model/implementation/PredicateImpl.java | 11 +++++------ .../implementation/SparqlQueryResultDataSource.java | 9 ++++----- .../examples/core/ConfigureReasonerLogging.java | 11 +++++------ 7 files changed, 23 insertions(+), 35 deletions(-) diff --git a/pom.xml b/pom.xml index aa884b190..d3e85c1e7 100644 --- a/pom.xml +++ b/pom.xml @@ -66,7 +66,6 @@ UTF-8 - 2.1.100 4.12 2.28.2 1.7.28 @@ -81,11 +80,6 @@ - - org.eclipse.jdt - org.eclipse.jdt.annotation - ${eclipse.jdt.annotation.version} - junit junit diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java index e19864aee..d245da52a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,6 @@ import java.util.stream.Stream; import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.api.Literal; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Term; @@ -52,7 +51,7 @@ public abstract class AbstractLiteralImpl implements Literal { * @param terms non-empty list of non-null terms. List size must be the same * as the predicate arity. */ - public AbstractLiteralImpl(@NonNull final Predicate predicate, @NonNull final List terms) { + public AbstractLiteralImpl(final Predicate predicate, final List terms) { Validate.notNull(predicate, "Literal predicates cannot be null."); Validate.noNullElements(terms, "Null terms cannot appear in literals. The list contains a null at position [%d]."); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java index 554ae0f63..cf7b69212 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,14 +22,13 @@ import java.util.List; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Term; public class NegativeLiteralImpl extends AbstractLiteralImpl implements NegativeLiteral { - public NegativeLiteralImpl(@NonNull Predicate predicate, @NonNull List terms) { + public NegativeLiteralImpl(final Predicate predicate, final List terms) { super(predicate, terms); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java index c0aba7096..e95d5cfaa 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,14 +22,13 @@ import java.util.List; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Term; public class PositiveLiteralImpl extends AbstractLiteralImpl implements PositiveLiteral { - public PositiveLiteralImpl(@NonNull Predicate predicate, @NonNull List terms) { + public PositiveLiteralImpl(final Predicate predicate, final List terms) { super(predicate, terms); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index 6ec346dae..04741fb8e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,13 +21,12 @@ */ import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.api.Predicate; /** * Implementation for {@link Predicate}. Supports predicates of arity 1 or * higher. - * + * * @author Irina Dragoste * */ @@ -39,11 +38,11 @@ public class PredicateImpl implements Predicate { /** * Constructor for {@link Predicate}s of arity 1 or higher. - * + * * @param name a non-blank String (not null, nor empty or whitespace). * @param arity an int value strictly greater than 0. 
*/ - public PredicateImpl(@NonNull String name, int arity) { + public PredicateImpl(final String name, int arity) { Validate.notBlank(name, "Predicates cannot be named by blank Strings."); Validate.isTrue(arity > 0, "Predicate arity must be greater than zero: %d", arity); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index 2ee6c900b..0015bece5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,6 @@ import java.util.Optional; import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Serializer; @@ -83,8 +82,8 @@ public SparqlQueryResultDataSource(final URL endpoint, final String queryVariabl */ // TODO add examples to javadoc // TODO add illegal argument exceptions to javadoc - public SparqlQueryResultDataSource(@NonNull final URL endpoint, - @NonNull final LinkedHashSet queryVariables, @NonNull final String queryBody) { + public SparqlQueryResultDataSource(final URL endpoint, + final LinkedHashSet queryVariables, final String queryBody) { Validate.notNull(endpoint, "Endpoint cannot be null."); Validate.notNull(queryVariables, "Query variables ordered set cannot be null."); Validate.noNullElements(queryVariables, "Query variables cannot be null or contain null elements."); diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java index b29262a49..81039cf41 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,7 +22,6 @@ import java.io.IOException; -import org.eclipse.jdt.annotation.Nullable; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.LogLevel; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -52,17 +51,17 @@ public class ConfigureReasonerLogging { * Path to the file where the default WARNING level reasoner logs will be * exported. 
*/ - private static @Nullable String reasonerWarningLogFilePath = logsFolder + "ReasonerWarningLogFile.log"; + private static String reasonerWarningLogFilePath = logsFolder + "ReasonerWarningLogFile.log"; /** * Path to the file where INFO level reasoner logs will be exported. */ - private static @Nullable String reasonerInfoLogFilePath = logsFolder + "ReasonerInfoLogFile.log"; + private static String reasonerInfoLogFilePath = logsFolder + "ReasonerInfoLogFile.log"; /** * Path to the file where DEBUG level reasoner logs will be exported. */ - private static @Nullable String reasonerDebugLogFilePath = logsFolder + "ReasonerDebugLogFile.log"; + private static String reasonerDebugLogFilePath = logsFolder + "ReasonerDebugLogFile.log"; public static void main(final String[] args) throws IOException, ParsingException { From 5ae828c05c3f6b1963639383c0e8d3cbd615d956 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 19:06:23 +0100 Subject: [PATCH 0591/1003] Core: Add serialisation for KnowledgeBase --- RELEASE-NOTES.md | 1 + .../core/model/implementation/Serializer.java | 91 ++++++++++++++----- .../rulewerk/core/reasoner/KnowledgeBase.java | 35 ++++++- .../core/reasoner/KnowledgeBaseTest.java | 14 ++- 4 files changed, 117 insertions(+), 24 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 159b43158..24f38e9e0 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -15,6 +15,7 @@ Breaking changes: New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` * All inferred facts can be serialized to a file using `Reasoner.writeInferences()` +* Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` * Rules files may import other rules files using `@import` and `@import-relative`, where the latter resolves relative IRIs using the current base IRI, unless the imported file explicitly specifies diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index d83045815..ef0c9f766 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -94,7 +94,9 @@ private Serializer() { /** * Creates a String representation of a given {@link Rule}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * @@ -106,7 +108,9 @@ public static String getString(final Rule rule) { /** * Creates a String representation of a given {@link Conjunction}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. */ @@ -127,7 +131,9 @@ public static String getString(final Conjunction conjunction) /** * Creates a String representation of a given {@link Literal}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ @@ -143,7 +149,9 @@ public static String getString(final Literal literal) { /** * Creates a String representation of a given {@link Fact}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. 
*/ @@ -154,7 +162,9 @@ public static String getFactString(final Fact fact) { /** * Creates a String representation of a given {@link AbstractConstant}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param constant a {@link AbstractConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -167,7 +177,9 @@ public static String getString(final AbstractConstant constant, FunctionRule syntax . + * @see Rule + * syntax . * @param constant a {@link AbstractConstant} * @return String representation corresponding to a given * {@link AbstractConstant}. @@ -180,7 +192,9 @@ public static String getString(final AbstractConstant constant) { * Creates a String representation corresponding to the name of a given * {@link LanguageStringConstant}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. @@ -202,10 +216,14 @@ public static String getConstantName(final LanguageStringConstant languageString *
 * {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
 * {@code "42"^^xsd:Integer} results in {@code 42},
 * {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
- * {@code "test"^^} results in {@code "test"^^}, modulo transformation of the datatype IRI.
+ * {@code "test"^^} results in
+ * {@code "test"^^}, modulo transformation of the datatype
+ * IRI.
 *
      * - * @see Rule syntax . + * @see Rule + * syntax . * @param datatypeConstant a {@link DatatypeConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -236,8 +254,10 @@ public static String getString(final DatatypeConstant datatypeConstant, Function *
 * {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
 * {@code "42"^^xsd:Integer} results in {@code 42},
 * {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
- * {@code "test"^^} results in {@code "test"^^}.
+ * {@code "test"^^} results in
+ * {@code "test"^^}.
 *
    + * * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -250,7 +270,9 @@ public static String getString(final DatatypeConstant datatypeConstant) { * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule syntax . + * @see Rule + * syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -265,7 +287,9 @@ public static String getConstantName(final DatatypeConstant datatypeConstant, * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule syntax . + * @see Rule + * syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -278,7 +302,9 @@ public static String getConstantName(final DatatypeConstant datatypeConstant) { /** * Creates a String representation of a given {@link ExistentialVariable}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. @@ -290,7 +316,9 @@ public static String getString(final ExistentialVariable existentialVariable) { /** * Creates a String representation of a given {@link UniversalVariable}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. @@ -302,7 +330,9 @@ public static String getString(final UniversalVariable universalVariable) { /** * Creates a String representation of a given {@link NamedNull}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ @@ -313,7 +343,9 @@ public static String getString(final NamedNull namedNull) { /** * Creates a String representation of a given {@link Predicate}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ @@ -324,7 +356,9 @@ public static String getString(final Predicate predicate) { /** * Creates a String representation of a given {@link DataSourceDeclaration}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. @@ -337,7 +371,9 @@ public static String getString(final DataSourceDeclaration dataSourceDeclaration /** * Creates a String representation of a given {@link CsvFileDataSource}. * - * @see Rule syntax .. + * @see Rule + * syntax .. * * @param csvFileDataSource * @return String representation corresponding to a given @@ -350,7 +386,9 @@ public static String getString(final CsvFileDataSource csvFileDataSource) { /** * Creates a String representation of a given {@link RdfFileDataSource}. * - * @see Rule syntax .. + * @see Rule + * syntax .. * * * @param rdfFileDataSource @@ -365,7 +403,9 @@ public static String getString(final RdfFileDataSource rdfFileDataSource) { * Creates a String representation of a given * {@link SparqlQueryResultDataSource}. * - * @see Rule syntax . + * @see Rule + * syntax . 
* * * @param dataSource @@ -461,7 +501,7 @@ public static String getFactString(Predicate predicate, List terms) { } public static String getFactString(Predicate predicate, List terms, Function iriTransformer) { - return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + "\n"; + return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; } public static String getString(Predicate predicate, List terms) { @@ -486,6 +526,15 @@ public static String getString(Predicate predicate, List terms, Function sb.append(getString(dataSource) + NEW_LINE)); + knowledgeBase.getRules().forEach(rule -> sb.append(getString(rule) + NEW_LINE)); + knowledgeBase.getFacts().forEach(fact -> sb.append(getFactString(fact) + NEW_LINE)); + + return sb.toString(); + } + public static String getBaseString(KnowledgeBase knowledgeBase) { String baseIri = knowledgeBase.getBaseIri(); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index e125d3267..79b9a520d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -22,8 +22,10 @@ import java.io.File; import java.io.FileInputStream; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.io.OutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -40,6 +42,7 @@ import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.exceptions.RulewerkException; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Entity; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; @@ -48,6 +51,7 @@ import org.semanticweb.rulewerk.core.model.api.Statement; import org.semanticweb.rulewerk.core.model.api.StatementVisitor; import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * A knowledge base with rules, facts, and declarations for loading data from @@ -59,7 +63,7 @@ * @author Markus Kroetzsch * */ -public class KnowledgeBase implements Iterable { +public class KnowledgeBase implements Entity, Iterable { private final Set listeners = new HashSet<>(); @@ -555,4 +559,33 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE public String unresolveAbsoluteIri(String iri) { return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri); } + + @Override + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + + /** + * Serialise the KnowledgeBase to the {@link OutputStream}. + * + * @param stream the {@link OutputStream} to serialise to. + * + * @throws IOException + */ + public void writeKnowledgeBase(OutputStream stream) throws IOException { + stream.write(getSyntacticRepresentation().getBytes()); + } + + /** + * Serialise the KnowledgeBase to the given {@link File}. + * + * @param filePath path to the file to serialise into. 
+ * + * @throws IOException + */ + public void writeKnowledgeBase(String filePath) throws IOException { + try (OutputStream stream = new FileOutputStream(filePath)) { + writeKnowledgeBase(stream); + } + } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index b44f89c24..1fd02f50a 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +22,9 @@ import static org.junit.Assert.*; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; import java.util.Arrays; import org.junit.Before; @@ -117,4 +120,11 @@ public void mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationExc assertEquals(this.kb.resolvePrefixedName("ex:test"), iri + "test"); assertEquals(this.kb.unresolveAbsoluteIri(iri + "test"), "ex:test"); } + + @Test + public void writeKnowledgeBase_justFacts_succeeds() throws IOException { + OutputStream stream = new ByteArrayOutputStream(); + this.kb.writeKnowledgeBase(stream); + assertEquals("P(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + } } From 28a4d53c45a191d7ba43ec69917a15a3921cb389 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 3 Mar 2020 18:03:08 +0100 Subject: [PATCH 0592/1003] Core: Make KnowledgeBase.writeKnowledgeBase() lazy --- .../core/model/implementation/Serializer.java | 9 ----- .../rulewerk/core/reasoner/KnowledgeBase.java | 27 +++++++++----- .../core/reasoner/KnowledgeBaseTest.java | 35 +++++++++++++++++++ 3 files changed, 53 insertions(+), 18 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index ef0c9f766..55869f980 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -526,15 +526,6 @@ public static String getString(Predicate predicate, List terms, Function sb.append(getString(dataSource) + NEW_LINE)); - knowledgeBase.getRules().forEach(rule -> sb.append(getString(rule) + NEW_LINE)); - knowledgeBase.getFacts().forEach(fact -> sb.append(getFactString(fact) + NEW_LINE)); - - return sb.toString(); - } - public static String getBaseString(KnowledgeBase knowledgeBase) { String baseIri = knowledgeBase.getBaseIri(); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 79b9a520d..09ce844cc 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -42,7 +42,6 @@ import 
org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.exceptions.RulewerkException; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; -import org.semanticweb.rulewerk.core.model.api.Entity; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; @@ -63,7 +62,7 @@ * @author Markus Kroetzsch * */ -public class KnowledgeBase implements Entity, Iterable { +public class KnowledgeBase implements Iterable { private final Set listeners = new HashSet<>(); @@ -473,7 +472,7 @@ public interface AdditionalInputParser { * @throws IOException when reading {@code file} fails * @throws IllegalArgumentException when {@code file} is null or has already * been imported - * @throws RulewerkException when parseFunction throws RulewerkException + * @throws RulewerkException when parseFunction throws RulewerkException */ public void importRulesFile(File file, AdditionalInputParser parseFunction) throws RulewerkException, IOException, IllegalArgumentException { @@ -560,11 +559,6 @@ public String unresolveAbsoluteIri(String iri) { return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri); } - @Override - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - /** * Serialise the KnowledgeBase to the {@link OutputStream}. * @@ -573,7 +567,22 @@ public String getSyntacticRepresentation() { * @throws IOException */ public void writeKnowledgeBase(OutputStream stream) throws IOException { - stream.write(getSyntacticRepresentation().getBytes()); + stream.write(Serializer.getBaseAndPrefixDeclarations(this).getBytes()); + + for (DataSourceDeclaration dataSource : getDataSourceDeclarations()) { + stream.write(Serializer.getString(dataSource).getBytes()); + stream.write('\n'); + } + + for (Rule rule : getRules()) { + stream.write(Serializer.getString(rule).getBytes()); + stream.write('\n'); + } + + for (Fact fact : getFacts()) { + stream.write(Serializer.getFactString(fact).getBytes()); + stream.write('\n'); + } } /** diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index 1fd02f50a..15c633f86 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -25,6 +25,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.net.URL; import java.util.Arrays; import org.junit.Before; @@ -32,8 +33,12 @@ import org.mockito.internal.util.collections.Sets; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class KnowledgeBaseTest { @@ -41,6 +46,11 @@ public class KnowledgeBaseTest { private final Fact fact1 = Expressions.makeFact("P", 
Expressions.makeAbstractConstant("c")); private final Fact fact2 = Expressions.makeFact("P", Expressions.makeAbstractConstant("d")); private final Fact fact3 = Expressions.makeFact("Q", Expressions.makeAbstractConstant("c")); + private final PositiveLiteral literal1 = Expressions.makePositiveLiteral("P", + Expressions.makeUniversalVariable("X")); + private final PositiveLiteral literal2 = Expressions.makePositiveLiteral("Q", + Expressions.makeUniversalVariable("X")); + private final Rule rule = Expressions.makeRule(literal1, literal2); @Before public void initKB() { @@ -127,4 +137,29 @@ public void writeKnowledgeBase_justFacts_succeeds() throws IOException { this.kb.writeKnowledgeBase(stream); assertEquals("P(c) .\nP(d) .\nQ(c) .\n", stream.toString()); } + + @Test + public void writeKnowledgeBase_withBase_succeeds() throws IOException { + String baseIri = "https://example.org/"; + MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setBaseIri(baseIri); + this.kb.mergePrefixDeclarations(prefixDeclarations); + OutputStream stream = new ByteArrayOutputStream(); + this.kb.writeKnowledgeBase(stream); + assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + } + + @Test + public void writeKnowledgeBase_alsoRuleAndDataSource_succeeds() throws IOException { + String sparqlIri = "https://example.org/sparql"; + String sparqlBgp = "?X ?p []"; + this.kb.addStatement(rule); + this.kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("S", 1), + new SparqlQueryResultDataSource(new URL(sparqlIri), "?X", sparqlBgp))); + + OutputStream stream = new ByteArrayOutputStream(); + this.kb.writeKnowledgeBase(stream); + assertEquals("@source S[1]: sparql(<" + sparqlIri + ">, \"?X\", \"" + sparqlBgp + + "\") .\nP(?X) :- Q(?X) .\nP(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + } } From 694458a4e56acb2bfcee9c51caf908a58f00ca2d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Mar 2020 14:29:24 +0100 Subject: [PATCH 0593/1003] Core: Fix handling of FileDataSources with backslashes in their name --- .../rulewerk/core/reasoner/implementation/FileDataSource.java | 4 ++-- .../rulewerk/core/model/DataSourceDeclarationTest.java | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index d65bc7af1..4d9e0ea82 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -68,8 +68,8 @@ public FileDataSource(final String filePath, final Iterable possibleExte Validate.notBlank(filePath, "Data source file name cannot be null!"); this.file = new File(filePath); - this.filePath = filePath.replaceAll("\\\\", "/"); // canonicalise windows-style path separators - this.fileName = this.filePath.substring(this.filePath.lastIndexOf("/") + 1); // just the file name + this.filePath = filePath; // unmodified file path, necessary for correct serialisation + this.fileName = this.file.getName(); this.extension = getValidExtension(this.fileName, possibleExtensions); this.fileNameWithoutExtension = this.fileName.substring(0, this.fileName.lastIndexOf(this.extension)); this.dirCanonicalPath = 
Paths.get(file.getCanonicalPath()).getParent().toString(); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 7c5ad3cba..93f52f6c4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -99,10 +99,11 @@ public void toString_CsvFileDataSource() throws IOException { public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throws IOException { final Predicate predicate = Expressions.makePredicate("q", 1); final String absoluteFilePathWindows = "D:\\input\\file.csv"; + final String escapedPath = absoluteFilePathWindows.replace("\\", "\\\\"); final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(absoluteFilePathWindows); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedCsvFileDataSource); - assertEquals("@source q[1]: load-csv(\"D:/input/file.csv\") .", dataSourceDeclaration.toString()); + assertEquals("@source q[1]: load-csv(\"" + escapedPath + "\") .", dataSourceDeclaration.toString()); } @Test From 439ff8ee816ab07f9e388abf16bf710185267a4f Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Mar 2020 14:52:40 +0100 Subject: [PATCH 0594/1003] Core: Improve validation in FileDataSource --- .../rulewerk/core/model/implementation/Serializer.java | 2 +- .../rulewerk/core/reasoner/implementation/FileDataSource.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 55869f980..a77634d3c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -420,7 +420,7 @@ public static String getString(final SparqlQueryResultDataSource dataSource) { } private static String getFileString(final FileDataSource fileDataSource) { - return getString(fileDataSource.getPath().toString()); + return getString(fileDataSource.getPath()); } private static String getIRIString(final String string) { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index 4d9e0ea82..ec902eb00 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -65,7 +65,7 @@ public abstract class FileDataSource extends VLogDataSource { * {@code possibleExtensions}. 
*/ public FileDataSource(final String filePath, final Iterable possibleExtensions) throws IOException { - Validate.notBlank(filePath, "Data source file name cannot be null!"); + Validate.notBlank(filePath, "Data source file path cannot be blank!"); this.file = new File(filePath); this.filePath = filePath; // unmodified file path, necessary for correct serialisation From f6be085c2e176b14fbff55822f3937ad208d1827 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Mar 2020 19:49:21 +0100 Subject: [PATCH 0595/1003] Address further review comments --- .../model/api/PrefixDeclarationRegistry.java | 19 ++++++--- .../AbstractPrefixDeclarationRegistry.java | 8 ++-- .../MergingPrefixDeclarationRegistry.java | 39 +++++++++++-------- .../core/model/implementation/Serializer.java | 2 +- .../implementation/Skolemization.java | 36 +++++++---------- .../parser/ConfigurableLiteralHandler.java | 12 ++++-- .../rulewerk/parser/DirectiveHandler.java | 38 +++++++----------- .../LocalPrefixDeclarationRegistry.java | 22 ++++++----- .../rulewerk/parser/ParserConfiguration.java | 2 +- .../ImportFileDirectiveHandler.java | 8 ++-- .../ImportFileRelativeDirectiveHandler.java | 9 +++-- .../parser/javacc/JavaCCParserBase.java | 8 ++-- 12 files changed, 109 insertions(+), 94 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index b9dc7386a..2ec1c15bc 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -82,16 +82,25 @@ public interface PrefixDeclarationRegistry extends Iterableprefixed + * name into an absolute IRI. + * + * @param prefixedName a prefixed name of the form prefixName:localName. + * + * @throws PrefixDeclarationException when the prefixName has not been declared. + * @return an absolute IRI corresponding to prefixedName. */ String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException; /** * Turn a potentially relative IRI into an absolute IRI. * - * @param potentiallyRelativeIri an IRI. - * @throws PrefixDeclarationException when called on a prefixedName using an - * unknown prefixName. + * @param relativeOrAbsoluteIri an IRI that may be relative or absolute. + * @throws PrefixDeclarationException when relativeOrAbsoluteIri is not a valid + * IRI. + * + * @return when relativeOrAbsoluteIri is an absolute IRI, it is returned as-is. + * Otherwise, the current base IRI is prepended. */ - String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarationException; + String absolutizeIri(String relativeOrAbsoluteIri) throws PrefixDeclarationException; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 48afd6a1e..892f2d33b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -44,15 +44,15 @@ public abstract class AbstractPrefixDeclarationRegistry implements PrefixDeclara /** * Iri holding the base namespace. 
*/ - protected String baseUri = null; + protected String baseIri = null; @Override public String getBaseIri() { - if (baseUri == null) { - baseUri = PrefixDeclarationRegistry.EMPTY_BASE; + if (baseIri == null) { + baseIri = PrefixDeclarationRegistry.EMPTY_BASE; } - return baseUri; + return baseIri; } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index 94570bd0a..11e42fb88 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -27,6 +27,7 @@ import java.util.Map; import java.util.Map.Entry; +import org.apache.commons.lang3.Validate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; /** @@ -41,12 +42,13 @@ final public class MergingPrefixDeclarationRegistry extends AbstractPrefixDeclar /** * Next index to use for generated prefix names. */ - private long nextIndex = 0; + private Integer nextIndex = 0; /** - * Prefix string to use for generated prefix name + * Template string to use for generated prefix name */ - private static final String GENERATED_PREFIX_PREFIX_STRING = "rulewerk_generated_"; + private static final String GENERATED_PREFIX_TEMPLATE = "rulewerk_generated_%d" + + PrefixDeclarationRegistry.PREFIX_NAME_SEPARATOR; public MergingPrefixDeclarationRegistry() { super(); @@ -59,26 +61,28 @@ public MergingPrefixDeclarationRegistry(final PrefixDeclarationRegistry prefixDe /** * Sets the base namespace to the given value. If a base Iri has already been - * set, it will be added as a prefix declaration with a fresh prefixName. + * set, one of them will be added as a prefix declaration with a fresh + * prefixName. * * @param baseIri the new base namespace. */ @Override public void setBaseIri(String baseIri) { - if (baseIri == this.baseUri) { + Validate.notNull(baseIri, "baseIri must not be null"); + if (baseIri == this.baseIri) { return; } - if (this.baseUri == null) { - this.baseUri = baseIri; - } else if (this.baseUri == PrefixDeclarationRegistry.EMPTY_BASE) { + if (this.baseIri == null) { + this.baseIri = baseIri; + } else if (this.baseIri == PrefixDeclarationRegistry.EMPTY_BASE) { // we need to keep the empty base, so that we don't // accidentally relativise absolute Iris to // baseIri. Hence, introduce baseIri as a fresh prefix. 
prefixes.put(getFreshPrefix(), baseIri); } else { - prefixes.put(getFreshPrefix(), this.baseUri); - this.baseUri = baseIri; + prefixes.put(getFreshPrefix(), this.baseIri); + this.baseIri = baseIri; } } @@ -109,7 +113,7 @@ public String unresolveAbsoluteIri(String iri) { String baseIri = getBaseIri(); if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.startsWith(baseIri) && !iri.equals(baseIri)) { - matches.put(iri.replaceFirst(baseUri, PrefixDeclarationRegistry.EMPTY_BASE), baseUri.length()); + matches.put(iri.replaceFirst(baseIri, PrefixDeclarationRegistry.EMPTY_BASE), baseIri.length()); } prefixes.forEach((prefixName, prefixIri) -> { @@ -145,13 +149,16 @@ public void mergePrefixDeclarations(final PrefixDeclarationRegistry other) { } } + private String getNextFreshPrefixCandidate() { + return String.format(GENERATED_PREFIX_TEMPLATE, this.nextIndex++); + } + private String getFreshPrefix() { - for (long idx = nextIndex; true; ++idx) { - String freshPrefix = GENERATED_PREFIX_PREFIX_STRING + idx + PrefixDeclarationRegistry.PREFIX_NAME_SEPARATOR; + while (true) { + String candidate = getNextFreshPrefixCandidate(); - if (!prefixes.containsKey(freshPrefix)) { - this.nextIndex = idx + 1; - return freshPrefix; + if (!prefixes.containsKey(candidate)) { + return candidate; } } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index a77634d3c..a788f3367 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -277,7 +277,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { * @return String representation corresponding to a given * {@link DatatypeConstant}. */ - public static String getConstantName(final DatatypeConstant datatypeConstant, + private static String getConstantName(final DatatypeConstant datatypeConstant, Function iriTransformer) { return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + getIRIString(datatypeConstant.getDatatype(), iriTransformer); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index d11d26849..3cd07bcb6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,19 +21,16 @@ */ import java.io.ByteArrayOutputStream; -import java.io.IOException; import java.util.UUID; -import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; /** - * A class that implements skolemization of named null names. The same - * name should be skolemized to the same {@link NamedNull} when - * skolemized using the same instance, but to different instances of - * {@link NamedNull} when skolemized using different instances of - * {@link Skolemization}. + * A class that implements skolemization of named null names. The same name + * should be skolemized to the same {@link NamedNull} when skolemized using the + * same instance, but two different instances of {@link NamedNull} when + * skolemized using different instances of {@link Skolemization}. * * @author Maximilian Marx */ @@ -44,21 +41,18 @@ public class Skolemization { private final byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); /** - * Skolemize a named null name. The same {@code name} will map to - * a {@link RenamedNamedNull} instance with the same name when - * called on the same instance. + * Skolemize a named null name. The same {@code name} will map to a + * {@link RenamedNamedNull} instance with the same name when called on the same + * instance. * - * @return a {@link RenamedNamedNull} instance with a new name - * that is specific to this instance and {@code name}. + * @return a {@link RenamedNamedNull} instance with a new name that is specific + * to this instance and {@code name}. */ public RenamedNamedNull skolemizeNamedNull(String name) { + byte[] nameBytes = name.getBytes(); ByteArrayOutputStream stream = new ByteArrayOutputStream(); - try { - stream.write(namedNullNamespace); - stream.write(name.getBytes()); - return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); - } catch (IOException e) { - throw new RulewerkRuntimeException(e.getMessage(), e); - } + stream.write(namedNullNamespace, 0, namedNullNamespace.length); + stream.write(nameBytes, 0, nameBytes.length); + return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java index 0bf3ea7be..5b8fddbdd 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,11 @@ import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** - * Handler for parsing a configurable literal expression. + * Handler for parsing a configurable literal expression. 
Note that these are + * not Literals in the logical sense (i.e., Atoms with a specific polarity), but + * rather expressions in the sense of + * RDF literals; + * essentially adding further quoted literals with custom delimiters. * * @author Maximilian Marx */ @@ -38,7 +42,7 @@ public interface ConfigurableLiteralHandler { * parser's state, but bound to new input. * * @throws ParsingException when the given syntactic form is invalid. - * @return an appropriate @{link Constant} instance. + * @return an appropriate @{link Term} instance. */ public Term parseLiteral(String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException; } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 01c9fc73c..146faf39c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,6 +25,7 @@ import java.net.MalformedURLException; import java.net.URI; import java.net.URL; +import java.nio.file.InvalidPathException; import java.util.List; import java.util.NoSuchElementException; @@ -58,7 +59,7 @@ public T handleDirective(List arguments, final SubParserFacto throws ParsingException; /** - * Validate the provided number of arguments to the data source. + * Validate the provided number of arguments to the directive statement. * * @param arguments Arguments given to the Directive statement. * @param number expected number of arguments @@ -87,23 +88,20 @@ public static void validateNumberOfArguments(final List argum */ public static String validateStringArgument(final DirectiveArgument argument, final String description) throws ParsingException { - try { - return argument.fromString().get(); - } catch (NoSuchElementException e) { - throw new ParsingException(description + "\"" + argument + "\" is not a string.", e); - } + return argument.fromString() + .orElseThrow(() -> new ParsingException("description \"" + argument + "\" is not a string.")); } /** - * Validate that the provided argument is a file name. + * Validate that the provided argument is a file path. * * @param argument the argument to validate * @param description a description of the argument, used in constructing the * error message. * - * @throws ParsingException when the given argument is not a valid file name. + * @throws ParsingException when the given argument is not a valid file path. * - * @return the File corresponding to the contained file name. + * @return the File corresponding to the contained file path. */ public static File validateFilenameArgument(final DirectiveArgument argument, final String description) throws ParsingException { @@ -111,8 +109,8 @@ public static File validateFilenameArgument(final DirectiveArgument argument, fi File file = new File(fileName); try { // we don't care about the actual path, just that there is one. 
- file.getCanonicalPath(); - } catch (IOException e) { + file.toPath(); + } catch (InvalidPathException e) { throw new ParsingException(description + "\"" + argument + "\" is not a valid file path.", e); } @@ -132,11 +130,8 @@ public static File validateFilenameArgument(final DirectiveArgument argument, fi */ public static URI validateIriArgument(final DirectiveArgument argument, final String description) throws ParsingException { - try { - return argument.fromIri().get(); - } catch (NoSuchElementException e) { - throw new ParsingException(description + "\"" + argument + "\" is not an IRI.", e); - } + return argument.fromIri() + .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not an IRI.")); } /** @@ -173,11 +168,8 @@ public static URL validateUrlArgument(final DirectiveArgument argument, final St */ public static Term validateTermArgument(final DirectiveArgument argument, final String description) throws ParsingException { - try { - return argument.fromTerm().get(); - } catch (NoSuchElementException e) { - throw new ParsingException(description + "\"" + argument + "\" is not a string.", e); - } + return argument.fromTerm() + .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not a Term.")); } /** diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index a72def47d..1f13e7799 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.parser; +import org.apache.commons.lang3.Validate; + /*- * #%L * Rulewerk Parser @@ -56,21 +58,22 @@ public LocalPrefixDeclarationRegistry() { */ public LocalPrefixDeclarationRegistry(String fallbackIri) { super(); + Validate.notNull(fallbackIri, "fallbackIri must not be null"); this.fallbackIri = fallbackIri; } /** * Returns the relevant base namespace. Returns the fallback IRI if no base - * namespace has been set yet. + * namespace has been set yet, and sets that as the base IRI. * * @return string of an absolute base IRI */ @Override public String getBaseIri() { - if (this.baseUri == null) { - this.baseUri = this.fallbackIri; + if (this.baseIri == null) { + this.baseIri = this.fallbackIri; } - return baseUri.toString(); + return baseIri; } @Override @@ -87,15 +90,16 @@ public void setPrefixIri(String prefixName, String prefixIri) throws PrefixDecla * Sets the base namespace to the given value. This should only be done once, * and not after the base namespace was assumed to be an implicit default value. 
* - * @param baseUri the new base namespace + * @param baseIri the new base namespace * @throws PrefixDeclarationException if base was already defined */ @Override - public void setBaseIri(String baseUri) throws PrefixDeclarationException { - if (this.baseUri != null) + public void setBaseIri(String baseIri) throws PrefixDeclarationException { + Validate.notNull(baseIri, "baseIri must not be null"); + if (this.baseIri != null) throw new PrefixDeclarationException( - "Base is already defined as <" + this.baseUri + "> and cannot be re-defined as " + baseUri); - this.baseUri = baseUri; + "Base is already defined as <" + this.baseIri + "> and cannot be re-defined as " + baseIri); + this.baseIri = baseIri; } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 5843f1db7..8213f30a1 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -299,7 +299,7 @@ public ParserConfiguration disallowNamedNulls() { * Whether parsing of {@link org.semanticweb.rulewerk.core.model.api.NamedNull} is * allowed. * - * @return this + * @return true iff parsing of NamedNulls is allowed. */ public boolean isParsingOfNamedNullsAllowed() { return this.allowNamedNulls; diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index 337475363..9c24f2ab1 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,9 +21,11 @@ */ import java.io.File; +import java.io.IOException; import java.io.InputStream; import java.util.List; +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.DirectiveHandler; @@ -51,7 +53,7 @@ public KnowledgeBase handleDirective(List arguments, final Su knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { RuleParser.parseInto(kb, stream, parserConfiguration); }); - } catch (Exception e) { + } catch (RulewerkException | IOException | IllegalArgumentException e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index 1de8df9f5..62b5e246f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,9 +21,12 @@ */ import java.io.File; +import java.io.IOException; import java.io.InputStream; import java.util.List; +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.parser.DirectiveArgument; @@ -52,7 +55,7 @@ public KnowledgeBase handleDirective(List arguments, SubParse knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarationRegistry.getBaseIri()); }); - } catch (Exception e) { + } catch (RulewerkException | IOException | IllegalArgumentException e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 3e38aefde..a7878d797 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -148,8 +148,8 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { /** * Creates a suitable {@link Constant} from the parsed data. * - * @param string the string data (unescaped) - * @param datatype the datatype, or null if not provided + * @param lexicalForm the string data (unescaped) + * @param datatype the datatype, or null if not provided * @return suitable constant */ Constant createConstant(String lexicalForm, String datatype) throws ParseException { From 95254588c20452f85a9dabf76f878307359e0020 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Sun, 8 Mar 2020 04:38:54 +0100 Subject: [PATCH 0596/1003] Core: Fix typo. --- .../rulewerk/core/reasoner/implementation/Skolemization.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index 3cd07bcb6..f60081c15 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -29,7 +29,7 @@ /** * A class that implements skolemization of named null names. The same name * should be skolemized to the same {@link NamedNull} when skolemized using the - * same instance, but two different instances of {@link NamedNull} when + * same instance, but to two different instances of {@link NamedNull} when * skolemized using different instances of {@link Skolemization}. * * @author Maximilian Marx From c941c8ab2cc330c8910f9a08804d8e7cc52fc725 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 16 Mar 2020 11:32:47 +0100 Subject: [PATCH 0597/1003] Core: Fix script to build local VLog --- build-vlog-library.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-vlog-library.sh b/build-vlog-library.sh index 02ecd116f..e82a046e0 100755 --- a/build-vlog-library.sh +++ b/build-vlog-library.sh @@ -21,6 +21,6 @@ else cd ../../.. fi -mkdir local_builds/jvlog.jar rulewerk-core/lib +mkdir -p rulewerk-core/lib cp local_builds/jvlog.jar rulewerk-core/lib/jvlog-local.jar mvn initialize -Pdevelopment From acc9f03947b5ce284042cf465509a0e899d4f1cf Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 15:31:03 +0200 Subject: [PATCH 0598/1003] Fix search box when building javadoc usig JDK9+ Fixes #170. 
--- pom.xml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/pom.xml b/pom.xml index d3e85c1e7..944c4f5fb 100644 --- a/pom.xml +++ b/pom.xml @@ -407,6 +407,24 @@ + + java-9 + + [9,) + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven.javadoc.version} + + --no-module-directories + + + + + From 3022f41553c8ff8a81a3aed7bc3176284c9e9f7d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 15:31:31 +0200 Subject: [PATCH 0599/1003] Fix javadoc warnings --- .../model/implementation/Expressions.java | 2 +- .../core/model/implementation/Serializer.java | 72 +++++-------------- .../rulewerk/core/reasoner/KnowledgeBase.java | 4 +- .../owlapi/AbstractClassToRuleConverter.java | 18 ++--- .../parser/DatatypeConstantHandler.java | 6 +- .../LocalPrefixDeclarationRegistry.java | 2 +- .../rulewerk/parser/ParserConfiguration.java | 3 +- 7 files changed, 35 insertions(+), 72 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java index 74529fb51..d1b62815a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -302,7 +302,7 @@ public static Conjunction makeConjunction(final Literal... literals) { } /** - * Creates a {@code Conjunction} of {@link T} ({@link PositiveLiteral} type) + * Creates a {@code Conjunction} of {@code T} ({@link PositiveLiteral} type) * objects. * * @param literals list of non-null positive literals diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index a788f3367..b4c6d07cd 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -94,9 +94,7 @@ private Serializer() { /** * Creates a String representation of a given {@link Rule}. * - * @see Rule - * syntax . + * @see Rule syntax * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * @@ -108,9 +106,7 @@ public static String getString(final Rule rule) { /** * Creates a String representation of a given {@link Conjunction}. * - * @see Rule - * syntax . + * @see Rule syntax * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. */ @@ -131,9 +127,7 @@ public static String getString(final Conjunction conjunction) /** * Creates a String representation of a given {@link Literal}. * - * @see Rule - * syntax . + * @see Rule syntax * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ @@ -149,9 +143,7 @@ public static String getString(final Literal literal) { /** * Creates a String representation of a given {@link Fact}. * - * @see Rule - * syntax . + * @see Rule syntax * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ @@ -162,9 +154,7 @@ public static String getFactString(final Fact fact) { /** * Creates a String representation of a given {@link AbstractConstant}. * - * @see Rule - * syntax . 
+ * @see Rule syntax * @param constant a {@link AbstractConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -177,9 +167,7 @@ public static String getString(final AbstractConstant constant, FunctionRule - * syntax . + * @see Rule syntax * @param constant a {@link AbstractConstant} * @return String representation corresponding to a given * {@link AbstractConstant}. @@ -192,9 +180,7 @@ public static String getString(final AbstractConstant constant) { * Creates a String representation corresponding to the name of a given * {@link LanguageStringConstant}. * - * @see Rule - * syntax . + * @see Rule syntax * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. @@ -221,9 +207,7 @@ public static String getConstantName(final LanguageStringConstant languageString * IRI. * * - * @see Rule - * syntax . + * @see Rule syntax * @param datatypeConstant a {@link DatatypeConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -270,9 +254,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule - * syntax . + * @see Rule syntax * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -287,9 +269,7 @@ private static String getConstantName(final DatatypeConstant datatypeConstant, * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule - * syntax . + * @see Rule syntax * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -302,9 +282,7 @@ public static String getConstantName(final DatatypeConstant datatypeConstant) { /** * Creates a String representation of a given {@link ExistentialVariable}. * - * @see Rule - * syntax . + * @see Rule syntax * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. @@ -316,9 +294,7 @@ public static String getString(final ExistentialVariable existentialVariable) { /** * Creates a String representation of a given {@link UniversalVariable}. * - * @see Rule - * syntax . + * @see Rule syntax * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. @@ -330,9 +306,7 @@ public static String getString(final UniversalVariable universalVariable) { /** * Creates a String representation of a given {@link NamedNull}. * - * @see Rule - * syntax . + * @see Rule syntax * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ @@ -343,9 +317,7 @@ public static String getString(final NamedNull namedNull) { /** * Creates a String representation of a given {@link Predicate}. * - * @see Rule - * syntax . + * @see Rule syntax * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ @@ -356,9 +328,7 @@ public static String getString(final Predicate predicate) { /** * Creates a String representation of a given {@link DataSourceDeclaration}. 
* - * @see Rule - * syntax . + * @see Rule syntax * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. @@ -371,9 +341,7 @@ public static String getString(final DataSourceDeclaration dataSourceDeclaration /** * Creates a String representation of a given {@link CsvFileDataSource}. * - * @see Rule - * syntax .. + * @see Rule syntax * * @param csvFileDataSource * @return String representation corresponding to a given @@ -386,9 +354,7 @@ public static String getString(final CsvFileDataSource csvFileDataSource) { /** * Creates a String representation of a given {@link RdfFileDataSource}. * - * @see Rule - * syntax .. + * @see Rule syntax * * * @param rdfFileDataSource @@ -403,9 +369,7 @@ public static String getString(final RdfFileDataSource rdfFileDataSource) { * Creates a String representation of a given * {@link SparqlQueryResultDataSource}. * - * @see Rule - * syntax . + * @see Rule syntax * * * @param dataSource diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 09ce844cc..6cadfa1af 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -534,7 +534,7 @@ public String getPrefixIri(String prefixName) throws PrefixDeclarationException /** * Resolve a prefixed name into an absolute IRI. Dual to - * {@link unresolveAbsoluteIri}. + * {@link KnowledgeBase#unresolveAbsoluteIri}. * * @param prefixedName the prefixed name to resolve. * @@ -548,7 +548,7 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE /** * Potentially abbreviate an absolute IRI using the declared prefixes. Dual to - * {@link resolvePrefixedName}. + * {@link KnowledgeBase#resolvePrefixedName}. * * @param iri the absolute IRI to abbreviate. * diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java index 7c8ab043c..b83e5c142 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -180,21 +180,21 @@ public AbstractClassToRuleConverter(final Term mainTerm, final SimpleConjunction } /** - * Returns true if the current rule is a tautology, i.e., has an unsatisfiable - * body or a tautological head. + * Check whether the current rule is a tautology. * - * @return + * @return true if the current rule is a tautology, i.e., has an + * unsatisfiable body or a tautological head. 
*/ public boolean isTautology() { return this.body.isFalse() || this.head.isTrue(); } /** - * Returns true if the current rule represents a falsity, i.e., has a - * tautological (or non-existent) body and an unsatisfiable (or no-existent) - * head. + * Checks whether the current rule is a falsity. * - * @return + * @return true if the current rule represents a falsity, i.e., + * has a tautological (or non-existent) body and an unsatisfiable + * (or no-existent) head. */ public boolean isFalsity() { return this.body.isTrueOrEmpty() && this.head.isFalseOrEmpty(); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java index bc94fc7ba..a64eac992 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -37,7 +37,7 @@ public interface DatatypeConstantHandler { * @throws ParsingException when the given representation is invalid for this * datatype. * - * @return + * @return a {@link DatatypeConstant} corresponding to the lexical form. */ public DatatypeConstant createConstant(String lexicalForm) throws ParsingException; } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index 1f13e7799..c61ec77c3 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -43,7 +43,7 @@ final public class LocalPrefixDeclarationRegistry extends AbstractPrefixDeclarat /** * Construct a Prefix declaration registry without an inherited base IRI. In - * this case, we default to {@value PrefixDeclarationRegistry#EMPTY_BASE}. + * this case, we default to {@value org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry#EMPTY_BASE}. */ public LocalPrefixDeclarationRegistry() { this(PrefixDeclarationRegistry.EMPTY_BASE); // empty string encodes: "no base" (use relative IRIs) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 8213f30a1..41a551a75 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -78,8 +78,7 @@ public class ParserConfiguration { * production of the rules grammar, corresponding to some {@link DataSource} * type. * - * @see - * the grammar. + * @see the grammar * * @param name Name of the data source, as it appears in the declaring * directive. 
From ab6d612b4fa2429fa738183a210eab65052bc66e Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 18 May 2020 12:26:52 +0200 Subject: [PATCH 0600/1003] Parser: Fix broken test on Windows --- .../rulewerk/parser/DirectiveHandlerTest.java | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java index ef0a4eb6c..02f58e5ad 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,6 +21,7 @@ */ import static org.junit.Assert.*; +import java.io.File; import java.net.MalformedURLException; import java.net.URI; @@ -39,7 +40,7 @@ public class DirectiveHandlerTest { @Test public void validateStringArgument_stringArgument_succeeds() throws ParsingException { - assertEquals(DirectiveHandler.validateStringArgument(STRING_ARGUMENT, "string argument"), STRING); + assertEquals(STRING, DirectiveHandler.validateStringArgument(STRING_ARGUMENT, "string argument")); } @Test(expected = ParsingException.class) @@ -54,7 +55,7 @@ public void validateStringArgument_termArgument_throws() throws ParsingException @Test public void validateIriArgument_iriArgument_succeeds() throws ParsingException { - assertEquals(DirectiveHandler.validateIriArgument(IRI_ARGUMENT, "iri argument"), IRI); + assertEquals(IRI, DirectiveHandler.validateIriArgument(IRI_ARGUMENT, "iri argument")); } @Test(expected = ParsingException.class) @@ -69,7 +70,7 @@ public void validateIriArgument_termArgument_throws() throws ParsingException { @Test public void validateTermArgument_termArgument_succeeds() throws ParsingException { - assertEquals(DirectiveHandler.validateTermArgument(TERM_ARGUMENT, "term argument"), TERM); + assertEquals(TERM, DirectiveHandler.validateTermArgument(TERM_ARGUMENT, "term argument")); } @Test(expected = ParsingException.class) @@ -84,7 +85,7 @@ public void validateTermArgument_iriArgument_throws() throws ParsingException { @Test public void validateFilenameArgument_filename_succeeds() throws ParsingException { - assertEquals(DirectiveHandler.validateFilenameArgument(STRING_ARGUMENT, "filename argument").getPath(), STRING); + assertEquals(new File(STRING), DirectiveHandler.validateFilenameArgument(STRING_ARGUMENT, "filename argument")); } @Test @@ -95,7 +96,7 @@ public void validateFilenameArgument_invalidFilename_throws() throws ParsingExce @Test public void validateUrlArgument_url_succeeds() throws ParsingException, MalformedURLException { - assertEquals(DirectiveHandler.validateUrlArgument(IRI_ARGUMENT, "urls argument"), IRI.toURL()); + assertEquals(IRI.toURL(), DirectiveHandler.validateUrlArgument(IRI_ARGUMENT, "urls argument")); } @Test(expected = ParsingException.class) From ca0851ea0828f1e23fc8e12a5867076b10f16d07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Larry=20Gonz=C3=A1lez?= Date: Mon, 18 May 2020 14:45:12 
+0200 Subject: [PATCH 0601/1003] remove unused imports; use autoformat --- coverage/pom.xml | 103 +++++++++--------- .../implementation/CsvFileDataSource.java | 1 - .../implementation/RdfFileDataSource.java | 5 +- .../core/model/DataSourceDeclarationTest.java | 4 +- .../implementation/AddDataSourceTest.java | 4 +- .../FileDataSourceTestUtils.java | 2 - 6 files changed, 56 insertions(+), 63 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 977046572..7b75d4ebd 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -1,4 +1,5 @@ - 4.0.0 @@ -13,58 +14,58 @@ coverage - - org.semanticweb.rulewerk - rulewerk-core - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-rdf - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-owlapi - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-graal - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-parser - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-client - 0.6.0-SNAPSHOT - + + org.semanticweb.rulewerk + rulewerk-core + 0.6.0-SNAPSHOT + + + org.semanticweb.rulewerk + rulewerk-rdf + 0.6.0-SNAPSHOT + + + org.semanticweb.rulewerk + rulewerk-owlapi + 0.6.0-SNAPSHOT + + + org.semanticweb.rulewerk + rulewerk-graal + 0.6.0-SNAPSHOT + + + org.semanticweb.rulewerk + rulewerk-parser + 0.6.0-SNAPSHOT + + + org.semanticweb.rulewerk + rulewerk-client + 0.6.0-SNAPSHOT + - - - - org.eluder.coveralls - coveralls-maven-plugin - - - org.jacoco - jacoco-maven-plugin - - - aggregate-reports-ut - test - - report-aggregate - - - - - + + + + org.eluder.coveralls + coveralls-maven-plugin + + + org.jacoco + jacoco-maven-plugin + + + aggregate-reports-ut + test + + report-aggregate + + + + + diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index 7b7812b4c..cb1dea326 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -20,7 +20,6 @@ * #L% */ -import java.io.File; import java.io.IOException; import java.util.Arrays; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index e56148544..265db485c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -20,7 +20,6 @@ * #L% */ -import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.Optional; @@ -55,8 +54,8 @@ public class RdfFileDataSource extends FileDataSource { /** * Constructor. * - * @param rdfFile path to a file of a {@code .nt} or {@code .nt.gz} extension and a - * valid N-Triples format. + * @param rdfFile path to a file of a {@code .nt} or {@code .nt.gz} extension + * and a valid N-Triples format. * @throws IOException if the path of the given {@code rdfFile} is * invalid. 
* @throws IllegalArgumentException if the extension of the given diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 93f52f6c4..310c24715 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -23,7 +23,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; -import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; @@ -91,8 +90,7 @@ public void toString_CsvFileDataSource() throws IOException { unzippedCsvFileDataSource); final String expectedFilePath = Serializer.getString(relativeDirName + fileName); - assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", - dataSourceDeclaration.toString()); + assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } @Test diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java index ce8ae45ef..2739ae08c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java @@ -23,7 +23,6 @@ * #L% */ -import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; @@ -223,8 +222,7 @@ public void testAddDataSourceNoFactsForPredicate() throws IOException { public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource1 = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); - final DataSource dataSource2 = new CsvFileDataSource( - FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"); + final DataSource dataSource2 = new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1)); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java index ea714f865..dc027e2a0 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -3,8 +3,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; -import java.io.File; - /*- * #%L * Rulewerk Core Components From 5312938ae685f47b1edc8d1a0aff160f54cd31fd Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 1 Apr 2020 06:35:00 +0200 Subject: [PATCH 0602/1003] Core: Split out VLog-specific code into a rulewerk-vlog --- RELEASE-NOTES.md | 5 + coverage/pom.xml | 65 +++--- pom.xml | 1 + rulewerk-client/pom.xml | 5 + .../rulewerk/client/picocli/ClientUtils.java | 4 +- .../client/picocli/PrintQueryResults.java | 4 +- .../client/picocli/RulewerkClient.java | 4 +- 
.../picocli/RulewerkClientMaterialize.java | 6 +- .../rulewerk/client/picocli/SaveModel.java | 4 +- .../client/picocli/SaveQueryResults.java | 4 +- rulewerk-core/pom.xml | 49 +---- .../IncompatiblePredicateArityException.java | 4 +- .../PrefixDeclarationException.java | 4 +- .../exceptions/ReasonerStateException.java | 4 +- .../core/exceptions/RulewerkException.java | 4 +- .../exceptions/RulewerkRuntimeException.java | 4 +- .../model/api/PrefixDeclarationRegistry.java | 4 +- .../implementation/AbstractLiteralImpl.java | 4 +- .../AbstractPrefixDeclarationRegistry.java | 4 +- .../model/implementation/Expressions.java | 4 +- .../MergingPrefixDeclarationRegistry.java | 4 +- .../implementation/NegativeLiteralImpl.java | 4 +- .../implementation/PositiveLiteralImpl.java | 4 +- .../model/implementation/PredicateImpl.java | 4 +- .../core/model/implementation/Serializer.java | 4 +- .../rulewerk/core/reasoner/KnowledgeBase.java | 4 +- .../rulewerk/core/reasoner/Reasoner.java | 21 +- .../implementation/CsvFileDataSource.java | 8 +- .../DataSourceConfigurationVisitor.java | 31 +++ .../EmptyQueryResultIterator.java | 2 +- .../implementation/FileDataSource.java | 28 +-- .../implementation/InMemoryDataSource.java | 25 ++- .../implementation/QueryAnswerCountImpl.java | 12 +- .../implementation/QueryResultImpl.java | 6 +- .../implementation/RdfFileDataSource.java | 9 +- ...ataSource.java => ReasonerDataSource.java} | 20 +- .../implementation/Skolemization.java | 4 +- .../SparqlQueryResultDataSource.java | 31 +-- .../core/model/DataSourceDeclarationTest.java | 4 +- .../MergingPrefixDeclarationRegistryTest.java | 4 +- .../core/reasoner/KnowledgeBaseTest.java | 4 +- .../implementation/QueryResultImplTest.java | 1 - rulewerk-examples/pom.xml | 5 + .../examples/CompareWikidataDBpedia.java | 2 +- .../rulewerk/examples/CountingTriangles.java | 2 +- .../rulewerk/examples/DoidExample.java | 2 +- .../rulewerk/examples/ExamplesUtils.java | 4 +- .../InMemoryGraphAnalysisExample.java | 2 +- .../examples/SimpleReasoningExample.java | 6 +- .../examples/core/AddDataFromCsvFile.java | 2 +- .../examples/core/AddDataFromRdfFile.java | 2 +- .../core/AddDataFromSparqlQueryResults.java | 5 +- .../core/ConfigureReasonerLogging.java | 7 +- .../SkolemVsRestrictedChaseTermination.java | 2 +- .../examples/graal/AddDataFromDlgpFile.java | 7 +- .../examples/graal/AddDataFromGraal.java | 6 +- .../examples/graal/DoidExampleGraal.java | 6 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 6 +- .../examples/rdf/AddDataFromRdfModel.java | 6 +- .../rulewerk/graal/GraalConvertException.java | 4 +- .../graal/GraalToRulewerkModelConverter.java | 4 +- .../GraalToRulewerkModelConverterTest.java | 4 +- .../owlapi/OwlAxiomToRulesConverter.java | 4 +- .../OwlFeatureNotSupportedException.java | 4 +- .../owlapi/OwlToRulesConversionHelper.java | 4 +- rulewerk-parser/pom.xml | 1 - .../parser/ConfigurableLiteralHandler.java | 4 +- .../rulewerk/parser/DirectiveHandler.java | 4 +- .../LocalPrefixDeclarationRegistry.java | 4 +- .../rulewerk/parser/ParserConfiguration.java | 4 +- .../rulewerk/parser/ParsingException.java | 4 +- .../CsvFileDataSourceDeclarationHandler.java | 4 +- .../RdfFileDataSourceDeclarationHandler.java | 4 +- .../ImportFileDirectiveHandler.java | 4 +- .../ImportFileRelativeDirectiveHandler.java | 4 +- .../parser/javacc/JavaCCParserBase.java | 4 +- .../parser/RuleParserDataSourceTest.java | 4 +- rulewerk-rdf/pom.xml | 10 +- .../rulewerk/rdf/RdfValueToTermConverter.java | 4 +- .../rulewerk/rdf/TestReasonOverRdfFacts.java | 2 +- 
rulewerk-vlog/LICENSE.txt | 201 ++++++++++++++++++ rulewerk-vlog/pom.xml | 70 ++++++ .../reasoner/vlog}/ModelToVLogConverter.java | 8 +- .../reasoner/vlog}/TermToVLogConverter.java | 9 +- .../VLogDataSourceConfigurationVisitor.java | 73 +++++++ .../reasoner/vlog}/VLogKnowledgeBase.java | 13 +- .../vlog}/VLogQueryResultIterator.java | 6 +- .../rulewerk/reasoner/vlog}/VLogReasoner.java | 11 +- .../reasoner/vlog}/VLogToModelConverter.java | 16 +- .../src/test/data/input/binaryFacts.csv | 0 .../src/test/data/input/constantD.csv | 0 .../src/test/data/input/empty.csv | 0 .../test/data/input/invalidFormatNtFacts.nt | 0 .../src/test/data/input/ternaryFacts.nt | 0 .../test/data/input/ternaryFactsZipped.nt.gz | Bin .../src/test/data/input/unaryFacts.csv | 0 .../src/test/data/input/unaryFactsCD.csv | 0 .../test/data/input/unaryFactsZipped.csv.gz | Bin .../src/test/data/output/.keep | 0 .../src/test/data/output/binaryFacts.csv | 2 + .../src/test/data/output/exclude_blanks.csv | 0 .../src/test/data/output/include_blanks.csv | 2 + .../src/test/data/output/unaryFacts.csv | 2 + .../reasoner/vlog}/AddDataSourceTest.java | 9 +- .../reasoner/vlog}/AnswerQueryTest.java | 4 +- .../reasoner/vlog}/CsvFileDataSourceTest.java | 25 +-- .../vlog/ExportQueryResultToCsvFileTest.java | 5 +- .../vlog}/FileDataSourceTestUtils.java | 11 +- .../GeneratedAnonymousIndividualsTest.java | 4 +- .../reasoner/vlog/LargeAritiesTest.java | 4 +- .../rulewerk/reasoner/vlog}/LoggingTest.java | 27 +-- .../vlog}/ModelToVLogConverterTest.java | 4 +- .../reasoner/vlog}/QueryAnswerCountTest.java | 4 +- .../vlog}/QueryAnsweringCorrectnessTest.java | 5 +- .../reasoner/vlog}/QueryResultsUtils.java | 4 +- .../reasoner/vlog}/RdfFileDataSourceTest.java | 26 +-- .../reasoner/vlog}/ReasonerTimeoutTest.java | 18 +- .../SparqlQueryResultDataSourceTest.java | 28 +-- .../reasoner/vlog/StratifiedNegationTest.java | 11 +- .../vlog/VLogDataFromCsvFileTest.java | 5 +- .../reasoner/vlog/VLogDataFromMemoryTest.java | 4 +- .../vlog/VLogDataFromRdfFileTest.java | 5 +- ...LogDataSourceConfigurationVisitorTest.java | 105 +++++++++ .../reasoner/vlog/VLogExpressions.java | 4 +- .../reasoner/vlog/VLogQueryResultUtils.java | 4 +- .../reasoner/vlog/VLogQueryTest.java | 4 +- .../reasoner/vlog}/VLogReasonerBasics.java | 8 +- .../vlog}/VLogReasonerCombinedInputs.java | 9 +- .../reasoner/vlog}/VLogReasonerCsvInput.java | 10 +- .../reasoner/vlog}/VLogReasonerCsvOutput.java | 4 +- .../reasoner/vlog}/VLogReasonerNegation.java | 8 +- .../reasoner/vlog}/VLogReasonerRdfInput.java | 10 +- .../vlog}/VLogReasonerSparqlInput.java | 9 +- .../reasoner/vlog}/VLogReasonerStateTest.java | 21 +- .../VLogReasonerWriteInferencesTest.java | 43 ++-- .../reasoner/vlog/VLogTermNamesTest.java | 4 +- .../vlog}/VLogToModelConverterTest.java | 8 +- 137 files changed, 951 insertions(+), 494 deletions(-) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java rename rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/{VLogDataSource.java => ReasonerDataSource.java} (61%) create mode 100644 rulewerk-vlog/LICENSE.txt create mode 100644 rulewerk-vlog/pom.xml rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/ModelToVLogConverter.java (98%) rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => 
rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/TermToVLogConverter.java (96%) create mode 100644 rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogKnowledgeBase.java (95%) rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogQueryResultIterator.java (95%) rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasoner.java (98%) rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogToModelConverter.java (95%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/binaryFacts.csv (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/constantD.csv (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/empty.csv (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/invalidFormatNtFacts.nt (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/ternaryFacts.nt (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/ternaryFactsZipped.nt.gz (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/unaryFacts.csv (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/unaryFactsCD.csv (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/unaryFactsZipped.csv.gz (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/output/.keep (100%) create mode 100644 rulewerk-vlog/src/test/data/output/binaryFacts.csv create mode 100644 rulewerk-vlog/src/test/data/output/exclude_blanks.csv create mode 100644 rulewerk-vlog/src/test/data/output/include_blanks.csv create mode 100644 rulewerk-vlog/src/test/data/output/unaryFacts.csv rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/AddDataSourceTest.java (98%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/AnswerQueryTest.java (99%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/CsvFileDataSourceTest.java (76%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/ExportQueryResultToCsvFileTest.java (93%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/FileDataSourceTestUtils.java (96%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/GeneratedAnonymousIndividualsTest.java (98%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/LargeAritiesTest.java (98%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner => 
rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/LoggingTest.java (95%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/ModelToVLogConverterTest.java (99%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/QueryAnswerCountTest.java (99%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/QueryAnsweringCorrectnessTest.java (99%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/QueryResultsUtils.java (95%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/RdfFileDataSourceTest.java (70%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/ReasonerTimeoutTest.java (94%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/SparqlQueryResultDataSourceTest.java (58%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/StratifiedNegationTest.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogDataFromCsvFileTest.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogDataFromMemoryTest.java (99%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogDataFromRdfFileTest.java (96%) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogExpressions.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogQueryResultUtils.java (95%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogQueryTest.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerBasics.java (93%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerCombinedInputs.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerCsvInput.java (95%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerCsvOutput.java (98%) rename 
{rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerNegation.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerRdfInput.java (95%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerSparqlInput.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerStateTest.java (96%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerWriteInferencesTest.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogTermNamesTest.java (98%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogToModelConverterTest.java (97%) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 24f38e9e0..d0408e336 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -11,6 +11,11 @@ Breaking changes: `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` * The `FileDataSource` constructor and those of derived classes now take the path to a file instead of `File` object. +* The VLog backend has been moved to a new `rulewerk-vlog` module, + changing several import paths. `Reasoner.getInstance()` now takes a + mandatory argument, a function taking a `KnowledgeBase` and + returning a `Reasoner` instance. Previous behaviour can be obtained + by using `Reasoner.getInstance(VLogReasoner::new)`. New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` diff --git a/coverage/pom.xml b/coverage/pom.xml index 7b75d4ebd..c91db4c28 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -14,36 +14,41 @@ coverage - - org.semanticweb.rulewerk - rulewerk-core - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-rdf - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-owlapi - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-graal - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-parser - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-client - 0.6.0-SNAPSHOT - + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + ${project.groupId} + rulewerk-rdf + ${project.version} + + + ${project.groupId} + rulewerk-owlapi + ${project.version} + + + ${project.groupId} + rulewerk-graal + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + ${project.groupId} + rulewerk-client + ${project.version} + diff --git a/pom.xml b/pom.xml index 944c4f5fb..bbe69eb09 100644 --- a/pom.xml +++ b/pom.xml @@ -18,6 +18,7 @@ rulewerk-core + rulewerk-vlog rulewerk-rdf rulewerk-examples rulewerk-owlapi diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index cc051d591..a3760ec8b 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -31,6 +31,11 @@ rulewerk-parser ${project.version}
    + + ${project.groupId} + rulewerk-vlog + ${project.version} + org.slf4j slf4j-log4j12 diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java index aca4cd136..4b7d94e48 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java index a36f0d85d..de472be40 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java index 54de47a9b..ee48b9beb 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java index 91a4d0cda..ba3d2ccdc 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,7 +31,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.LogLevel; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; import picocli.CommandLine.ArgGroup; diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java index bcd23d052..03f98eff1 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java index 9ca9bd4fb..1f84bb15a 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
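The breaking change described in the release notes above boils down to passing the backend constructor explicitly and adjusting one import. A minimal before/after sketch, assuming the `rulewerk-vlog` dependency that this patch adds to the client and examples POMs, and the `VLogReasoner(KnowledgeBase)` constructor shown further below:

```
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
// the import path changed from org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner
import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;

public class GetInstanceMigration {

    public static void main(String[] args) throws Exception {
        // Before this patch: Reasoner.getInstance() implicitly created a VLogReasoner.
        // Now the backend factory is passed explicitly:
        try (Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) {
            KnowledgeBase kb = reasoner.getKnowledgeBase();
            // load statements and reason as before
        }

        // Constructing the backend directly is equivalent:
        try (Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) {
            // ...
        }
    }
}
```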
diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 35b7e5ea4..36a019c2c 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -14,52 +14,5 @@ jar Rulewerk Core Components - Core components of Rulewerk: reasoner and model - - - 1.3.3-snapshot - - - - - - ${project.groupId} - vlog-base - ${karmaresearch.vlog.version} - - - - - - development - - - - - - org.apache.maven.plugins - maven-install-plugin - 2.4 - - - initialize - - install-file - - - ${project.groupId} - vlog-base - ${karmaresearch.vlog.version} - jar - ./lib/jvlog-local.jar - - - - - - - - + Core components of Rulewerk: reasoner interface and model diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java index 28e22ce99..57f505120 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java index eacafd6de..0e6515403 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java index af961ffda..d87ef7fcd 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java index ea0eaca0e..5223c04bc 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java index 9ff8dca3a..93237f788 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 2ec1c15bc..cde555c76 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java index d245da52a..4b3669226 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 892f2d33b..2bb4e72a7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java index d1b62815a..1d465c4b1 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index 11e42fb88..c9104a280 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java index cf7b69212..d4efc8496 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java index e95d5cfaa..dc0892e78 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index 04741fb8e..da4bff697 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index b4c6d07cd..281c16cc6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 6cadfa1af..7765364c6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 5ea9ee0b1..e7a0245c2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,6 +24,8 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.util.function.Function; +import java.util.function.Supplier; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; @@ -35,10 +37,9 @@ import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; /** - * Interface that exposes the existential rule reasoning capabilities of VLog. + * Interface that exposes the (existential) rule reasoning capabilities of a Reasoner. *
    * The knowledge base of the reasoner can be loaded with explicit facts * and existential rules that would infer implicit facts trough @@ -76,11 +77,15 @@ public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { /** * Factory method that to instantiate a Reasoner with an empty knowledge base. * - * @return a {@link VLogReasoner} instance. + * @return a {@link Reasoner} instance. */ - static Reasoner getInstance() { - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - return new VLogReasoner(knowledgeBase); + static Reasoner getInstance(Function makeReasoner) { + return getInstance(makeReasoner, KnowledgeBase::new); + } + + static Reasoner getInstance(Function makeReasoner, Supplier makeKnowledgeBase) { + final KnowledgeBase knowledgeBase = makeKnowledgeBase.get(); + return makeReasoner.apply(knowledgeBase); } /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index cb1dea326..3ee0a4574 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -77,4 +77,8 @@ public String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java new file mode 100644 index 000000000..82b3d11de --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java @@ -0,0 +1,31 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public interface DataSourceConfigurationVisitor { + public void visit(CsvFileDataSource dataSource); + + public void visit(RdfFileDataSource dataSource); + + public void visit(SparqlQueryResultDataSource dataSource); + + public void visit(InMemoryDataSource dataSource); +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java index 449a9dbe9..3c3df5d9b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -26,7 +26,7 @@ /** * Iterator that represents an empty query result. - * + * * @author Markus Kroetzsch * */ diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index ec902eb00..cbdb10e61 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -38,10 +38,7 @@ * @author Irina Dragoste * */ -public abstract class FileDataSource extends VLogDataSource { - - private final static String DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY"; - +public abstract class FileDataSource implements ReasonerDataSource { private final File file; private final String filePath; private final String fileName; @@ -87,21 +84,6 @@ private String getValidExtension(final String fileName, final Iterable p return potentialExtension.get(); } - @Override - public final String toConfigString() { - final String configStringPattern = - - PREDICATE_NAME_CONFIG_LINE + - - DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - - "EDB%1$d_param0=" + this.dirCanonicalPath + "\n" + - - "EDB%1$d_param1=" + this.fileNameWithoutExtension + "\n"; - - return configStringPattern; - } - public File getFile() { return this.file; } @@ -119,7 +101,7 @@ public String getName() { * * @return The canonical path to the parent directory where the file resides. */ - String getDirCanonicalPath() { + public String getDirCanonicalPath() { return this.dirCanonicalPath; } @@ -128,7 +110,7 @@ String getDirCanonicalPath() { * * @return the file basename without any extension. 
*/ - String getFileNameWithoutExtension() { + public String getFileNameWithoutExtension() { return this.fileNameWithoutExtension; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java index c3033c8a6..72af91a42 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java @@ -35,7 +35,7 @@ * @author Markus Kroetzsch * */ -public class InMemoryDataSource implements DataSource { +public class InMemoryDataSource implements ReasonerDataSource { String[][] data; int nextEmptyTuple = 0; @@ -57,6 +57,22 @@ public InMemoryDataSource(final int arity, final int initialCapacity) { this.data = new String[initialCapacity][arity]; } + /** + * Transforms a constant name in a format suitable for the + * reasoner. The default implementation assumes the VLog backend. + * @param constantName the name of the constant + * + * @return a transformed version of constantName that is suitable for the Reasoner. + */ + protected String transformConstantName(String constantName) { + if (!constantName.startsWith("\"") && constantName.contains(":")) { + // enclose IRIs with brackets + return "<" + constantName + ">"; + } + // it's either a datatype literal, or a relative IRI, leave it unchanged + return constantName; + } + /** * Adds a fact to this data source. The number of constant names must agree with * the arity of this data source. @@ -74,7 +90,7 @@ public void addTuple(final String... constantNames) { } this.data[this.nextEmptyTuple] = new String[this.arity]; for (int i = 0; i < this.arity; i++) { - this.data[this.nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstantName(constantNames[i]); + this.data[this.nextEmptyTuple][i] = transformConstantName(constantNames[i]); } this.nextEmptyTuple++; } @@ -105,4 +121,9 @@ public String getSyntacticRepresentation() { } return sb.toString(); } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java index edd6b44ca..c433758ee 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java @@ -1,8 +1,5 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; -import org.semanticweb.rulewerk.core.reasoner.Correctness; -import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; - /*- * #%L * Rulewerk Core Components @@ -23,6 +20,9 @@ * #L% */ +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; + public class QueryAnswerCountImpl implements QueryAnswerCount { final private Correctness correctness; @@ -30,14 +30,14 @@ public class QueryAnswerCountImpl implements QueryAnswerCount { /** * Constructor of QueryAnswerSize - * + * * @param correctness of the evaluated query. See {@link Correctness}. - * + * * @param size number of query answers, i.e. number of facts in the * extension of the query. 
*/ - QueryAnswerCountImpl(Correctness correctness, long size) { + public QueryAnswerCountImpl(Correctness correctness, long size) { this.correctness = correctness; this.count = size; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index b143f7b4f..7d8a06f24 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -26,15 +26,15 @@ import org.semanticweb.rulewerk.core.model.api.Term; /** - * Implements {@link QueryResult}s. + * Implements {@link QueryResult}s. * @author Irina Dragoste * */ -final class QueryResultImpl implements QueryResult { +public final class QueryResultImpl implements QueryResult { private final List terms; - QueryResultImpl(List terms) { + public QueryResultImpl(List terms) { this.terms = terms; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index 265db485c..29a3f327f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -80,4 +80,9 @@ public String getSyntacticRepresentation() { public Optional getRequiredArity() { return Optional.of(3); } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java similarity index 61% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java index 68eb58133..57c6e1dee 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java @@ -23,22 +23,14 @@ import org.semanticweb.rulewerk.core.model.api.DataSource; /** - * Abstract base class for VLog-specific data sources. - * - * @author Markus Kroetzsch - * + * An interface for DataSources that can be used with a Reasoner. 
*/ -public abstract class VLogDataSource implements DataSource { - - public static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; - public static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; - +public interface ReasonerDataSource extends DataSource { /** - * Constructs a String representation of the data source. + * Accept a {@link DataSourceConfigurationVisitor} to configure a + * reasoner to load this data source. * - * @return a String representation of the data source configuration for a - * certain predicate. + * @param visitor the visitor. */ - public abstract String toConfigString(); - + public void accept(DataSourceConfigurationVisitor visitor); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index f60081c15..b0bc00877 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index 0015bece5..cdc8723d2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -36,9 +36,9 @@ * @author Irina Dragoste * */ -public class SparqlQueryResultDataSource extends VLogDataSource { +public class SparqlQueryResultDataSource implements ReasonerDataSource { + - private static final String DATASOURCE_TYPE_CONFIG_VALUE = "SPARQL"; private final URL endpoint; private final String queryVariables; @@ -108,20 +108,20 @@ public String getQueryVariables() { return this.queryVariables; } - @Override - public final String toConfigString() { - final String configStringPattern = + // @Override + // public final String toConfigString() { + // final String configStringPattern = - PREDICATE_NAME_CONFIG_LINE + + // PREDICATE_NAME_CONFIG_LINE + - DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + + // DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - "EDB%1$d_param0=" + this.endpoint + "\n" + "EDB%1$d_param1=" + this.queryVariables + "\n" + + // "EDB%1$d_param0=" + this.endpoint + "\n" + "EDB%1$d_param1=" + this.queryVariables + "\n" + - "EDB%1$d_param2=" + this.queryBody + "\n"; + // "EDB%1$d_param2=" + this.queryBody + "\n"; - return configStringPattern; - } + // return configStringPattern; + // } static String getQueryVariablesList(LinkedHashSet queryVariables) { final StringBuilder sb = new StringBuilder(); @@ -177,4 +177,9 @@ public String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } + } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 310c24715..dfd658443 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 9f06ee6d2..812385669 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
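The data-source hunks above replace the VLog-specific `toConfigString()` strings with double dispatch: every `ReasonerDataSource` now implements `accept(DataSourceConfigurationVisitor)`, and each backend contributes its own visitor (the new `VLogDataSourceConfigurationVisitor` in the file listing lives in `rulewerk-vlog`). The following is only an illustrative visitor, not the VLog one; it relies solely on the interface and on the accessors made public in these hunks:

```
import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;
import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor;
import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource;
import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource;
import org.semanticweb.rulewerk.core.reasoner.implementation.ReasonerDataSource;
import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource;

/**
 * Illustrative visitor that merely reports the kind of each data source.
 * A real backend visitor would build its engine-specific configuration here.
 */
public class LoggingDataSourceConfigurationVisitor implements DataSourceConfigurationVisitor {

    @Override
    public void visit(CsvFileDataSource dataSource) {
        System.out.println("CSV file in " + dataSource.getDirCanonicalPath());
    }

    @Override
    public void visit(RdfFileDataSource dataSource) {
        System.out.println("RDF file " + dataSource.getFileNameWithoutExtension());
    }

    @Override
    public void visit(SparqlQueryResultDataSource dataSource) {
        System.out.println("SPARQL result with variables " + dataSource.getQueryVariables());
    }

    @Override
    public void visit(InMemoryDataSource dataSource) {
        System.out.println("in-memory tuples");
    }

    /** Double dispatch: the data source selects the matching visit overload. */
    public void configure(ReasonerDataSource dataSource) {
        dataSource.accept(this);
    }
}
```

A reasoner implementation would iterate over the data source declarations of its knowledge base and call `accept` on each source with its own visitor.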
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index 15c633f86..d989d1a9f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java index 724a41064..38d57c214 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java @@ -32,7 +32,6 @@ import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; public class QueryResultImplTest { diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 216564049..84344dd75 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -41,6 +41,11 @@ ${project.groupId} rulewerk-parser ${project.version} +
    + + ${project.groupId} + rulewerk-vlog + ${project.version} org.slf4j diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java index 85f9e8b3b..d3249c93d 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java @@ -24,7 +24,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java index b44afdbf8..7d9cbb509 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java @@ -25,7 +25,7 @@ import java.io.IOException; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java index b80163fb4..f903a5207 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java @@ -28,7 +28,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.LogLevel; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index 5149abc41..a0801f415 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
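Returning to the `InMemoryDataSource` hunk earlier in this patch: constant names are no longer routed through `TermToVLogConverter`; instead the protected `transformConstantName` hook applies a default, VLog-style transformation (absolute IRIs are wrapped in angle brackets, while quoted literals and colon-free relative IRIs pass through unchanged), and subclasses may override it for other backends. A sketch of such an override; the subclass is hypothetical:

```
import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource;

/**
 * Hypothetical subclass that keeps constant names verbatim instead of
 * applying the default VLog-style IRI bracketing.
 */
public class VerbatimInMemoryDataSource extends InMemoryDataSource {

    public VerbatimInMemoryDataSource(int arity, int initialCapacity) {
        super(arity, initialCapacity);
    }

    @Override
    protected String transformConstantName(String constantName) {
        // Default behaviour of the parent class (see the hunk above):
        //   "http://example.org/a" -> "<http://example.org/a>" (has ':', not quoted)
        //   "\"a string\""         -> unchanged (quoted literal)
        //   "localName"            -> unchanged (no ':', treated as relative IRI)
        return constantName; // here: no transformation at all
    }
}
```

Tuples added to such a source via `addTuple` then reach the reasoner exactly as written.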
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java index 0647c1edc..591261fb1 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -27,7 +27,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java index 28e870fa6..3f0f1ab6e 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -24,7 +24,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java index 0b76c4bb3..4aea67362 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java @@ -26,7 +26,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java index 2aa2c02a2..50770072d 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java @@ -27,7 +27,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java index 8eb7a2a8f..2f74a96fa 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java @@ -39,6 +39,7 @@ import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; /** @@ -46,7 +47,7 @@ * a remote database endpoint, using {@link SparqlQueryResultDataSource}. In * this example, we will query Wikidata for titles of publications that have * authors who have children together. 
- * + * * @author Irina Dragoste * */ @@ -120,7 +121,7 @@ public static void main(final String[] args) throws IOException { */ final Predicate queryPredicate = Expressions.makePredicate("publicationParents", 3); - try (Reasoner reasoner = Reasoner.getInstance()) { + try (Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java index 81039cf41..902fbf543 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,6 +25,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.LogLevel; import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; @@ -65,7 +66,7 @@ public class ConfigureReasonerLogging { public static void main(final String[] args) throws IOException, ParsingException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* exists z. B(?y, !z) :- A(?x, ?y) . 
*/ kb.addStatements(RuleParser.parseRule("B(?Y, !Z) :- A(?X, ?Y) .")); diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java index ae4042817..84b1291af 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java @@ -25,7 +25,7 @@ import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.reasoner.Algorithm; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java index 2dc50ca84..1f9621bd8 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,6 +29,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; @@ -101,7 +102,7 @@ public static void main(final String[] args) throws IOException { * the reasoner automatically. */ - try (Reasoner reasoner = Reasoner.getInstance()) { + try (Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java index b4f05470f..b5c39441e 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -27,7 +27,7 @@ import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java index 971fedb8b..9bfac8c74 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -35,7 +35,7 @@ import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.DoidExample; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java index 5bcb6bea0..52815942e 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -38,7 +38,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java index 77af29d19..79298dd2c 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -45,7 +45,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java index a99563a3f..836fa51e0 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java index 5ab82d428..94fa0cd7c 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java index b6c3cd88b..fb8fcc40f 100644 --- a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java +++ b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java index 80390026e..b4ddbc3ee 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java index 06f02adca..dcae928b1 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index 802161334..f5c737dc2 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index 70df4a5b6..87da11c8d 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -14,7 +14,6 @@ rulewerk-parser Rulewerk Parser - http://maven.apache.org UTF-8 diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java index 5b8fddbdd..ff356ba3f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 146faf39c..ae6c25251 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index c61ec77c3..faa15a7db 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 41a551a75..f6bc3b9d3 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java index 802cafe03..f5890b9e8 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java index 7979f154f..67a66c9c4 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java index ee7a2ec79..259c26759 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index 9c24f2ab1..ae227a9d1 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index 62b5e246f..1ef7a4372 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index a7878d797..54126cd0f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index 14c2bceb3..e72ae9dfb 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index e8cb4b566..efed746b6 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -2,9 +2,9 @@ - + 4.0.0 - + org.semanticweb.rulewerk rulewerk-parent @@ -23,6 +23,12 @@ rulewerk-core ${project.version} + + ${project.groupId} + rulewerk-vlog + ${project.version} + test + org.openrdf.sesame diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java index ef732de3f..058228665 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java index 6297a8968..214d32d6e 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java @@ -44,7 +44,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; public class TestReasonOverRdfFacts { diff --git a/rulewerk-vlog/LICENSE.txt b/rulewerk-vlog/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-vlog/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
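Stepping back from the license text: the example updates earlier in this patch all make the same two mechanical changes. `VLogReasoner` now lives in `org.semanticweb.rulewerk.reasoner.vlog` (instead of `org.semanticweb.rulewerk.core.reasoner.implementation`), and examples that previously called the parameterless `Reasoner.getInstance()` now pass the VLog backend explicitly via `Reasoner.getInstance(VLogReasoner::new)`. A minimal sketch of the updated usage pattern, using only the calls that appear in the hunks above (class name and rule text are just placeholders), would look roughly like this:

```
import java.io.IOException;

import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.parser.ParsingException;
import org.semanticweb.rulewerk.parser.RuleParser;
import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;

public class UpdatedReasonerUsageSketch {
	public static void main(final String[] args) throws IOException, ParsingException {
		// The VLog-backed reasoner is now supplied explicitly, as in the updated examples above.
		try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) {
			final KnowledgeBase kb = reasoner.getKnowledgeBase();
			// Placeholder rule, parsed with the same RuleParser API the examples use.
			kb.addStatements(RuleParser.parseRule("B(?Y, !Z) :- A(?X, ?Y) ."));
		}
	}
}
```

Examples that construct the reasoner directly (e.g. SkolemVsRestrictedChaseTermination) only change their import to `org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner`, so client code depends on the new rulewerk-vlog module declared in the pom below.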
diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml new file mode 100644 index 000000000..7aa17b881 --- /dev/null +++ b/rulewerk-vlog/pom.xml @@ -0,0 +1,70 @@ + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.6.0-SNAPSHOT + + + rulewerk-vlog + jar + + Rulewerk VLog Reasoner Support + and model + + + 1.3.3-snapshot + + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + + ${project.groupId} + vlog-base + ${karmaresearch.vlog.version} + + + + + + development + + + + + + org.apache.maven.plugins + maven-install-plugin + 2.4 + + + initialize + + install-file + + + ${project.groupId} + vlog-base + ${karmaresearch.vlog.version} + jar + ./lib/jvlog-local.jar + + + + + + + + + diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverter.java similarity index 98% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverter.java index ae7f59597..3fe63160a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverter.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -39,7 +39,7 @@ * Utility class with static methods for converting from VLog API model objects * ({@code org.semanticweb.rulewerk.core.model}) to internal VLog model objects * ({@code karmaresearch.vlog}). - * + * * @author Irina Dragoste * */ @@ -97,7 +97,7 @@ static String[] toVLogFactTuple(final Fact fact) { /** * Internal String representation that uniquely identifies a {@link Predicate}. - * + * * @param predicate a {@link Predicate} * @return String representation corresponding to given predicate name and * arity. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java similarity index 96% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index 64bc83db3..90496ff76 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -30,6 +30,7 @@ import org.semanticweb.rulewerk.core.model.api.TermVisitor; import org.semanticweb.rulewerk.core.model.api.UniversalVariable; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; /** * A visitor that converts {@link Term}s of different types to corresponding diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java new file mode 100644 index 000000000..dd4ac05f9 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java @@ -0,0 +1,73 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; + +public class VLogDataSourceConfigurationVisitor implements DataSourceConfigurationVisitor { + private String configString = null; + + private static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; + private static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; + private final static String FILE_DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY"; + private static final String SPARQL_DATASOURCE_TYPE_CONFIG_VALUE = "SPARQL"; + + public String getConfigString() { + return configString; + } + + protected void setFileConfigString(FileDataSource dataSource) { + this.configString = + PREDICATE_NAME_CONFIG_LINE + + DATASOURCE_TYPE_CONFIG_PARAM + "=" + FILE_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + + "EDB%1$d_param0=" + dataSource.getDirCanonicalPath() + "\n" + + "EDB%1$d_param1=" + dataSource.getFileNameWithoutExtension() + "\n"; + } + + @Override + public void visit(CsvFileDataSource dataSource) { + setFileConfigString(dataSource); + } + + @Override + public void visit(RdfFileDataSource dataSource) { + setFileConfigString(dataSource); + } + + @Override + public void visit(SparqlQueryResultDataSource dataSource) { + this.configString = + PREDICATE_NAME_CONFIG_LINE + + DATASOURCE_TYPE_CONFIG_PARAM + "=" + SPARQL_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + + "EDB%1$d_param0=" + dataSource.getEndpoint() + "\n" + "EDB%1$d_param1=" + dataSource.getQueryVariables() + "\n" + + "EDB%1$d_param2=" + dataSource.getQueryBody() + "\n"; + } + + @Override + public void visit(InMemoryDataSource dataSource) { + this.configString = null; + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java similarity index 95% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java index a7e7da9aa..3bd57a52f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -46,6 +46,7 @@ import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.ReasonerDataSource; /** * Class for organizing a Knowledge Base using vLog-specific data structures. 
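The new `VLogDataSourceConfigurationVisitor` introduced just above replaces the old `VLogDataSource.toConfigString()` mechanism: the VLog-specific EDB configuration template is now produced by visiting a data source, and the `VLogKnowledgeBase` hunk that follows shows where that happens. A minimal sketch of the hand-off, assuming a hypothetical CSV file name and that `CsvFileDataSource` accepts the visitor through `ReasonerDataSource.accept` as the hunk below does, could look like this:

```
import java.io.IOException;

import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;
import org.semanticweb.rulewerk.reasoner.vlog.VLogDataSourceConfigurationVisitor;

public class DataSourceConfigSketch {
	public static void main(final String[] args) throws IOException {
		// Hypothetical CSV file; the CsvFileDataSource constructor may throw IOException.
		final CsvFileDataSource dataSource = new CsvFileDataSource("facts.csv");

		// The visitor collects the VLog EDB configuration template for this source type.
		final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor();
		dataSource.accept(visitor); // assumed to dispatch to visit(CsvFileDataSource), as in the hunk below

		// For file-based sources this yields the "EDB%1$d_type=INMEMORY" template;
		// VLogKnowledgeBase later formats in the EDB index and predicate name.
		final String configString = visitor.getConfigString();
		System.out.println(configString);
	}
}
```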
@@ -120,9 +121,11 @@ int addDataSourceConfigurationString(final DataSource dataSource, final Predicat int newDataSourceIndex = dataSourceIndex; if (dataSource != null) { - if (dataSource instanceof VLogDataSource) { - final VLogDataSource vLogDataSource = (VLogDataSource) dataSource; - final String configString = vLogDataSource.toConfigString(); + if (dataSource instanceof ReasonerDataSource) { + final ReasonerDataSource reasonerDataSource = (ReasonerDataSource) dataSource; + final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); + reasonerDataSource.accept(visitor); + final String configString = visitor.getConfigString(); if (configString != null) { formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); newDataSourceIndex++; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java similarity index 95% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java index 1db2cc922..35dc7f75c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /* * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -30,7 +30,7 @@ /** * Iterates trough all answers to a query. An answer to a query is a * {@link QueryResult}. Each query answer is distinct. - * + * * @author Irina Dragoste * */ diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java similarity index 98% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 00b774ef0..c487cc102 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /* * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -55,6 +55,9 @@ import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.ReasonerState; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.reasoner.implementation.EmptyQueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java similarity index 95% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index d90331ccc..581e13368 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /* * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -31,12 +31,14 @@ import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; /** * Utility class with static methods for converting from VLog internal model * ({@code karmaresearch.vlog} objects) to VLog API model * ({@code org.semanticweb.rulewerk.core.model.api}) objects. - * + * * @author Irina Dragoste * */ @@ -45,7 +47,7 @@ class VLogToModelConverter { /** * Converts internal VLog query results (represented as arrays of * {@link karmaresearch.vlog.Term}s) into VLog model API QueryResults. - * + * * @param vLogQueryResult an array of terms that represent an answer to a query. * @return a QueryResult containing the corresponding {@code vLogQueryResult} as * a List of {@link Term}s. @@ -57,7 +59,7 @@ static QueryResult toQueryResult(karmaresearch.vlog.Term[] vLogQueryResult) { /** * Converts an array of internal VLog terms ({@link karmaresearch.vlog.Term}) * into the corresponding list of VLog API model {@link Term}. - * + * * @param vLogTerms input terms array, to be converted to a list of * corresponding {@link Term}s. * @return list of {@link Term}s, where each element corresponds to the element @@ -74,7 +76,7 @@ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { /** * Converts an internal VLog term ({@link karmaresearch.vlog.Term}) to a VLog * API model {@link Term} of the same type and name. - * + * * @param vLogTerm term to be converted * @return a ({@link karmaresearch.vlog.Term}) with the same name as given * {@code vLogTerm} and of the corresponding type. @@ -96,7 +98,7 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { /** * Creates a {@link Constant} from the given VLog constant name. 
- * + * * @param vLogConstantName the string name used by VLog * @return {@link Constant} object */ diff --git a/rulewerk-core/src/test/data/input/binaryFacts.csv b/rulewerk-vlog/src/test/data/input/binaryFacts.csv similarity index 100% rename from rulewerk-core/src/test/data/input/binaryFacts.csv rename to rulewerk-vlog/src/test/data/input/binaryFacts.csv diff --git a/rulewerk-core/src/test/data/input/constantD.csv b/rulewerk-vlog/src/test/data/input/constantD.csv similarity index 100% rename from rulewerk-core/src/test/data/input/constantD.csv rename to rulewerk-vlog/src/test/data/input/constantD.csv diff --git a/rulewerk-core/src/test/data/input/empty.csv b/rulewerk-vlog/src/test/data/input/empty.csv similarity index 100% rename from rulewerk-core/src/test/data/input/empty.csv rename to rulewerk-vlog/src/test/data/input/empty.csv diff --git a/rulewerk-core/src/test/data/input/invalidFormatNtFacts.nt b/rulewerk-vlog/src/test/data/input/invalidFormatNtFacts.nt similarity index 100% rename from rulewerk-core/src/test/data/input/invalidFormatNtFacts.nt rename to rulewerk-vlog/src/test/data/input/invalidFormatNtFacts.nt diff --git a/rulewerk-core/src/test/data/input/ternaryFacts.nt b/rulewerk-vlog/src/test/data/input/ternaryFacts.nt similarity index 100% rename from rulewerk-core/src/test/data/input/ternaryFacts.nt rename to rulewerk-vlog/src/test/data/input/ternaryFacts.nt diff --git a/rulewerk-core/src/test/data/input/ternaryFactsZipped.nt.gz b/rulewerk-vlog/src/test/data/input/ternaryFactsZipped.nt.gz similarity index 100% rename from rulewerk-core/src/test/data/input/ternaryFactsZipped.nt.gz rename to rulewerk-vlog/src/test/data/input/ternaryFactsZipped.nt.gz diff --git a/rulewerk-core/src/test/data/input/unaryFacts.csv b/rulewerk-vlog/src/test/data/input/unaryFacts.csv similarity index 100% rename from rulewerk-core/src/test/data/input/unaryFacts.csv rename to rulewerk-vlog/src/test/data/input/unaryFacts.csv diff --git a/rulewerk-core/src/test/data/input/unaryFactsCD.csv b/rulewerk-vlog/src/test/data/input/unaryFactsCD.csv similarity index 100% rename from rulewerk-core/src/test/data/input/unaryFactsCD.csv rename to rulewerk-vlog/src/test/data/input/unaryFactsCD.csv diff --git a/rulewerk-core/src/test/data/input/unaryFactsZipped.csv.gz b/rulewerk-vlog/src/test/data/input/unaryFactsZipped.csv.gz similarity index 100% rename from rulewerk-core/src/test/data/input/unaryFactsZipped.csv.gz rename to rulewerk-vlog/src/test/data/input/unaryFactsZipped.csv.gz diff --git a/rulewerk-core/src/test/data/output/.keep b/rulewerk-vlog/src/test/data/output/.keep similarity index 100% rename from rulewerk-core/src/test/data/output/.keep rename to rulewerk-vlog/src/test/data/output/.keep diff --git a/rulewerk-vlog/src/test/data/output/binaryFacts.csv b/rulewerk-vlog/src/test/data/output/binaryFacts.csv new file mode 100644 index 000000000..bcaabc2bc --- /dev/null +++ b/rulewerk-vlog/src/test/data/output/binaryFacts.csv @@ -0,0 +1,2 @@ +c1,c2 +c3,c4 diff --git a/rulewerk-vlog/src/test/data/output/exclude_blanks.csv b/rulewerk-vlog/src/test/data/output/exclude_blanks.csv new file mode 100644 index 000000000..e69de29bb diff --git a/rulewerk-vlog/src/test/data/output/include_blanks.csv b/rulewerk-vlog/src/test/data/output/include_blanks.csv new file mode 100644 index 000000000..e502cf529 --- /dev/null +++ b/rulewerk-vlog/src/test/data/output/include_blanks.csv @@ -0,0 +1,2 @@ +c,1_2_0 +c,1_3_0 diff --git a/rulewerk-vlog/src/test/data/output/unaryFacts.csv b/rulewerk-vlog/src/test/data/output/unaryFacts.csv 
new file mode 100644 index 000000000..d0aaf976a --- /dev/null +++ b/rulewerk-vlog/src/test/data/output/unaryFacts.csv @@ -0,0 +1,2 @@ +c1 +c2 diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java index 2739ae08c..ed1aa4f23 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java @@ -1,20 +1,20 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -41,6 +41,7 @@ import org.semanticweb.rulewerk.core.reasoner.Correctness; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; public class AddDataSourceTest { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AnswerQueryTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AnswerQueryTest.java index 9de6276f3..30d844c3c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AnswerQueryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -7,7 +7,7 @@ /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java similarity index 76% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java index e94173d20..d85f73d09 100644 --- 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,9 @@ import java.io.IOException; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; + +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; public class CsvFileDataSourceTest { @@ -54,19 +56,6 @@ public void testConstructor() throws IOException { FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, new File(gzFile).getName(), dirCanonicalPath, "file"); } - @Test - public void testToConfigString() throws IOException { - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); - final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); - - final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); - final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0=" - + expectedDirCanonicalPath + "\n" + "EDB%1$d_param1=file\n"; - - assertEquals(expectedConfigString, unzippedCsvFileDataSource.toConfigString()); - assertEquals(expectedConfigString, zippedCsvFileDataSource.toConfigString()); - } - @Test public void testNoParentDir() throws IOException { final FileDataSource fileDataSource = new CsvFileDataSource("file.csv"); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ExportQueryResultToCsvFileTest.java similarity index 93% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ExportQueryResultToCsvFileTest.java index 89ad3228d..3619ce9c8 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ExportQueryResultToCsvFileTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -27,7 +27,6 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; diff --git 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java similarity index 96% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java index dc027e2a0..6a0819ed3 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java @@ -1,11 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -23,6 +20,9 @@ * #L% */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + import java.io.FileReader; import java.io.IOException; import java.io.Reader; @@ -38,6 +38,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; /** * Utility class for reading from and writing to data source files. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/GeneratedAnonymousIndividualsTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/GeneratedAnonymousIndividualsTest.java index 92512fdd6..87f5eb910 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/GeneratedAnonymousIndividualsTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LargeAritiesTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LargeAritiesTest.java index 1ee33c9eb..5497489b4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LargeAritiesTest.java @@ -1,10 +1,10 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertArrayEquals; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner 
Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java similarity index 95% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java index 269cb56cc..598ea90c9 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java @@ -1,17 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -29,6 +20,14 @@ * #L% */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; import java.io.IOException; import java.util.Arrays; @@ -40,7 +39,9 @@ import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; public class LoggingTest { @@ -108,7 +109,7 @@ public void testSetLogFileInexistent() throws IOException { @Test(expected = NullPointerException.class) public void testSetLogLevelNull() { - try (final Reasoner instance = Reasoner.getInstance()) { + try (final Reasoner instance = Reasoner.getInstance(VLogReasoner::new)) { instance.setLogLevel(null); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java index b88e5e3ef..653cb5401 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnswerCountTest.java similarity index 99% 
rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnswerCountTest.java index 2e7e0c29f..7b2d519f6 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnswerCountTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java index ffbada38f..d9cc90601 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -46,6 +46,7 @@ import org.semanticweb.rulewerk.core.reasoner.Correctness; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; public class QueryAnsweringCorrectnessTest { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryResultsUtils.java similarity index 95% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryResultsUtils.java index 81ee7716e..32af07014 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryResultsUtils.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java similarity index 70% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java index 70e8b4657..f16ab06f2 100644 --- 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java @@ -1,19 +1,19 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertEquals; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,9 @@ import java.io.IOException; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; + +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; public class RdfFileDataSourceTest { @@ -53,18 +55,4 @@ public void testConstructor() throws IOException { FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, new File(unzippedRdfFile).getName(), dirCanonicalPath, "file"); FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, new File(zippedRdfFile).getName(), dirCanonicalPath, "file"); } - - @Test - public void testToConfigString() throws IOException { - final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); - final RdfFileDataSource zippedRdfFileDataSource = new RdfFileDataSource(zippedRdfFile); - - final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); - final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0=" - + expectedDirCanonicalPath + "\n" + "EDB%1$d_param1=file\n"; - - assertEquals(expectedConfigString, unzippedRdfFileDataSource.toConfigString()); - assertEquals(expectedConfigString, zippedRdfFileDataSource.toConfigString()); - } - } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java similarity index 94% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java index b8e28c53d..415e03ccf 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java @@ -1,11 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -23,6 +20,9 @@ * #L% */ +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; 
@@ -38,7 +38,9 @@ import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.model.implementation.Expressions; /** @@ -46,7 +48,7 @@ * expected and terminates reasoning after the given {@link #timeout}. Results * are accepted within one second to account for setup and tear down of * reasoning resources. - * + * * @author Adrian Bielefeldt * */ @@ -117,7 +119,7 @@ public void setUp() { @Test(expected = IllegalArgumentException.class) public void testSetReasoningTimeout() { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { reasoner.setReasoningTimeout(-3); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java similarity index 58% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java index 9e9806113..81f6aaa92 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -32,6 +32,8 @@ import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class SparqlQueryResultDataSourceTest { @@ -40,28 +42,6 @@ public class SparqlQueryResultDataSourceTest { public SparqlQueryResultDataSourceTest() throws MalformedURLException { } - @Test - public void testToStringSimpleConstructor() throws MalformedURLException { - final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, "b,a", - "?a wdt:P22 ?b"); - final String expectedStringConfig = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=SPARQL\n" - + "EDB%1$d_param0=http://query.wikidata.org/sparql\n" + "EDB%1$d_param1=b,a\n" - + "EDB%1$d_param2=?a wdt:P22 ?b\n"; - assertEquals(expectedStringConfig, dataSource.toConfigString()); - } - - @Test - public void testToStringList() throws MalformedURLException { - final LinkedHashSet queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeUniversalVariable("b"), Expressions.makeUniversalVariable("a"))); - final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, - "?a wdt:P22 ?b"); - final String expectedStringConfig = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=SPARQL\n" - + 
"EDB%1$d_param0=http://query.wikidata.org/sparql\n" + "EDB%1$d_param1=b,a\n" - + "EDB%1$d_param2=?a wdt:P22 ?b\n"; - assertEquals(expectedStringConfig, dataSource.toConfigString()); - } - @Test(expected = IllegalArgumentException.class) public void testEmptyQueryBodyList() throws IOException { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/StratifiedNegationTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/StratifiedNegationTest.java index ee9041fda..dcd9243f4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/StratifiedNegationTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -25,7 +25,6 @@ import static org.junit.Assert.assertTrue; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.vlog.VLogExpressions; import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; @@ -43,7 +42,7 @@ public class StratifiedNegationTest { /** * P(x), Not(Q(x)) -> R(x) Q - EDB. - * + * * @throws EDBConfigurationException * @throws NotStartedException */ @@ -103,7 +102,7 @@ public void testSimpleInputNegation() /** * P(x), Not(Q(x)) -> R(x)
    * R-IDB. - * + * * @throws EDBConfigurationException * @throws NotStartedException */ @@ -152,7 +151,7 @@ public void testStratifiedNegationOnIDB() /** * P(x), Not(Q(x)) -> Q(x)
    * Q - IDB. - * + * * @throws EDBConfigurationException * @throws NotStartedException */ diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromCsvFileTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromCsvFileTest.java index afc81d080..7eae82b26 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromCsvFileTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -28,7 +28,6 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.Atom; diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromMemoryTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromMemoryTest.java index fb2882349..080030601 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromMemoryTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /* * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromRdfFileTest.java similarity index 96% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromRdfFileTest.java index a16b34c89..dd15fcca2 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromRdfFileTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -28,7 +28,6 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.Atom; diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java 
b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java new file mode 100644 index 000000000..b4b860f58 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java @@ -0,0 +1,105 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Arrays; +import java.util.LinkedHashSet; + +import org.junit.Test; + +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.ReasonerDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +public class VLogDataSourceConfigurationVisitorTest { + private final String csvFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"; + private final String unzippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"; + private final String zippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt.gz"; + private final String gzFile = csvFile + ".gz"; + final URL endpoint = new URL("http://query.wikidata.org/sparql"); + + public VLogDataSourceConfigurationVisitorTest() throws MalformedURLException { + } + + @Test + public void visit_CsvFileDataSource_succeeds() throws IOException { + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); + final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); + + final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); + final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0=" + + expectedDirCanonicalPath + "\n" + "EDB%1$d_param1=file\n"; + + assertEquals(expectedConfigString, toConfigString(unzippedCsvFileDataSource)); + assertEquals(expectedConfigString, toConfigString(zippedCsvFileDataSource)); + } + + @Test + public void visit_RdfFileDataSource_succeeds() throws IOException { + final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + final RdfFileDataSource zippedRdfFileDataSource = new RdfFileDataSource(zippedRdfFile); + + final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); + final String 
expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0=" + + expectedDirCanonicalPath + "\n" + "EDB%1$d_param1=file\n"; + + assertEquals(expectedConfigString, toConfigString(unzippedRdfFileDataSource)); + assertEquals(expectedConfigString, toConfigString(zippedRdfFileDataSource)); + } + + @Test + public void visit_SparqlQueryResultDataSource_succeeds() throws MalformedURLException { + final SparqlQueryResultDataSource simpleDataSource = new SparqlQueryResultDataSource(endpoint, "b,a", + "?a wdt:P22 ?b"); + final LinkedHashSet queryVariables = new LinkedHashSet<>( + Arrays.asList(Expressions.makeUniversalVariable("b"), Expressions.makeUniversalVariable("a"))); + final SparqlQueryResultDataSource listDataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, + "?a wdt:P22 ?b"); + final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=SPARQL\n" + + "EDB%1$d_param0=http://query.wikidata.org/sparql\n" + "EDB%1$d_param1=b,a\n" + + "EDB%1$d_param2=?a wdt:P22 ?b\n"; + assertEquals(expectedConfigString, toConfigString(simpleDataSource)); + assertEquals(expectedConfigString, toConfigString(listDataSource)); + } + + @Test + public void visit_InMemoryDataSource_returnsNull() { + final InMemoryDataSource inMemoryDataSource = new InMemoryDataSource(1, 1); + assertEquals(null, toConfigString(inMemoryDataSource)); + } + + private String toConfigString(ReasonerDataSource dataSource) { + VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); + dataSource.accept(visitor); + return visitor.getConfigString(); + } +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogExpressions.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogExpressions.java index d87a36190..0aee3638f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogExpressions.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java similarity index 95% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java index a23dae441..d192add94 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryTest.java index 57ca22e3f..af7133bca 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java similarity index 93% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java index 0c5ecf2e2..ca6a3bed4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java @@ -1,11 +1,11 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; /* * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -59,14 +59,14 @@ public class VLogReasonerBasics { @Test(expected = NullPointerException.class) public void testSetAlgorithmNull() { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { reasoner.setAlgorithm(null); } } @Test(expected = NullPointerException.class) public void setRuleRewriteStrategy1() { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { reasoner.setRuleRewriteStrategy(null); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCombinedInputs.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCombinedInputs.java index b3f2fba74..17f5eac18 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCombinedInputs.java @@ -1,19 +1,19 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertEquals; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed 
under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -41,6 +41,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; public class VLogReasonerCombinedInputs { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvInput.java similarity index 95% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvInput.java index 3ec10b94f..b5b365174 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvInput.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -42,6 +42,8 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; public class VLogReasonerCsvInput { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvOutput.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvOutput.java index 7585f47bf..45981bb6c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvOutput.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerNegation.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerNegation.java index 39f802725..215590006 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerNegation.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java similarity index 95% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java index f9b52ad44..c64e829ea 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -39,6 +39,8 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; public class VLogReasonerRdfInput { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerSparqlInput.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerSparqlInput.java index 238d488a1..7fd6c34ad 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerSparqlInput.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -39,6 +39,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class VLogReasonerSparqlInput { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java similarity index 96% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java index 91c61c680..17f3dc8cc 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -49,6 +49,7 @@ import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; public class VLogReasonerStateTest { @@ -67,21 +68,21 @@ public class VLogReasonerStateTest { @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.answerQuery(exampleQueryAtom, true); } } @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswersBeforeLoad() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); } } @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryAfterReset() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.reason(); reasoner.resetReasoner(); reasoner.answerQuery(exampleQueryAtom, true); @@ -90,7 +91,7 @@ public void testFailAnswerQueryAfterReset() throws IOException { @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswersAfterReset() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.reason(); reasoner.resetReasoner(); reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); @@ -171,7 +172,7 @@ 
public void testAddFacts2() throws IOException { @Test public void testResetBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.resetReasoner(); } } @@ -279,7 +280,7 @@ public void testResetEmptyKnowledgeBase() throws IOException { @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswerToCsvBeforeLoad() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java index 99c6a68f4..5eb58262e 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -1,4 +1,24 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ import static org.junit.Assert.*; import static org.mockito.Mockito.*; @@ -28,26 +48,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; - -/*- - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; public class VLogReasonerWriteInferencesTest { private final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogTermNamesTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogTermNamesTest.java index ed85ea768..2ff298e9d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogTermNamesTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java index 0f67a6fa5..2e2db1b5c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
From dcc5f033616ca1cdb26078d0b36a0b209f70995e Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 1 Apr 2020 19:53:19 +0200 Subject: [PATCH 0603/1003] Core: Generalise writeInferences to forEachInference --- .../rulewerk/core/reasoner/Reasoner.java | 53 +++++++++++++++++-- .../rulewerk/reasoner/vlog/VLogReasoner.java | 7 +-- 2 files changed, 51 insertions(+), 9 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index e7a0245c2..1cf1f95fa 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -24,6 +24,8 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.util.List; +import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Supplier; @@ -33,14 +35,17 @@ import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** - * Interface that exposes the (existential) rule reasoning capabilities of a Reasoner. - *
    + * Interface that exposes the (existential) rule reasoning capabilities of a + * Reasoner.
    * The knowledge base of the reasoner can be loaded with explicit facts * and existential rules that would infer implicit facts through * reasoning.
    @@ -77,13 +82,27 @@ public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { /** * Factory method that to instantiate a Reasoner with an empty knowledge base. * + * @param makeReasoner a function that creates a Reasoner instances given a + * {@link KnowledgeBase}. + * * @return a {@link Reasoner} instance. */ static Reasoner getInstance(Function makeReasoner) { return getInstance(makeReasoner, KnowledgeBase::new); } - static Reasoner getInstance(Function makeReasoner, Supplier makeKnowledgeBase) { + /** + * Factory method that to instantiate a Reasoner with an empty knowledge base. + * + * @param makeReasoner a function that creates a Reasoner instances given a + * {@link KnowledgeBase}. + * @param makeKnowledgeBase a function that creates a {@link KnowledgeBase} + * instance. + * + * @return a {@link Reasoner} instance. + */ + static Reasoner getInstance(Function makeReasoner, + Supplier makeKnowledgeBase) { final KnowledgeBase knowledgeBase = makeKnowledgeBase.get(); return makeReasoner.apply(knowledgeBase); } @@ -95,6 +114,27 @@ static Reasoner getInstance(Functi */ KnowledgeBase getKnowledgeBase(); + /** + * Interface for actions to perform on inferences. + * + * Essentially a {@link java.util.function.BiConsumer}, but with a more + * permissive Exception spec. + */ + @FunctionalInterface + public interface InferenceAction { + void accept(Predicate predicate, List termList) throws IOException; + } + + /** + * Performs the given action for each inference. + * + * @param action The action to be performed for each inference. + * @return the correctness of the inferences, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. + * @throws IOException + */ + Correctness forEachInference(InferenceAction action) throws IOException; + /** * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. @@ -104,7 +144,12 @@ static Reasoner getInstance(Functi * reasoning (materialisation) and its {@link KnowledgeBase}. 
* @throws IOException */ - Correctness writeInferences(OutputStream stream) throws IOException; + default Correctness writeInferences(OutputStream stream) throws IOException { + final KnowledgeBase knowledgeBase = getKnowledgeBase(); + stream.write(Serializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); + return forEachInference((predicate, termList) -> stream + .write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); + } /** * Exports all the (explicit and implicit) facts inferred during reasoning of diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index c487cc102..9cdee5143 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -458,7 +458,7 @@ private void validateBeforeQuerying(final PositiveLiteral query) { } @Override - public Correctness writeInferences(OutputStream stream) throws IOException { + public Correctness forEachInference(InferenceAction action) throws IOException { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, @@ -466,8 +466,6 @@ public Correctness writeInferences(OutputStream stream) throws IOException { } final Set toBeQueriedHeadPredicates = getKnowledgeBasePredicates(); - stream.write(Serializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); - for (final Predicate predicate : toBeQueriedHeadPredicates) { final PositiveLiteral queryAtom = getQueryAtom(predicate); final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); @@ -475,8 +473,7 @@ public Correctness writeInferences(OutputStream stream) throws IOException { while (answers.hasNext()) { final karmaresearch.vlog.Term[] vlogTerms = answers.next(); final List termList = VLogToModelConverter.toTermList(vlogTerms); - stream.write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri) - .getBytes()); + action.accept(predicate, termList); } } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); From f96db52c92b6f01051dc19753b65da4cd66638c1 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 1 Apr 2020 20:17:13 +0200 Subject: [PATCH 0604/1003] Add Reasoner#getInferences First steps towards #167, still requires more tests. 
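(Editorial note, not part of any patch: the sketch below illustrates how client code could use the `forEachInference` callback introduced in the previous commit. Package and class names are taken from the diffs above; the knowledge base `kb` and the printing action are assumptions for illustration only.)

```java
import java.io.IOException;

import org.semanticweb.rulewerk.core.reasoner.Correctness;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;

public class ForEachInferenceSketch {
	public static void main(final String[] args) throws IOException {
		final KnowledgeBase kb = new KnowledgeBase(); // assumed to be populated with facts and rules elsewhere
		try (final Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.reason(); // materialise first; forEachInference requires a loaded knowledge base
			// The action receives each inferred fact as a predicate plus its term list
			// and may itself throw IOException, which forEachInference propagates.
			final Correctness correctness = reasoner.forEachInference(
					(predicate, termList) -> System.out.println(predicate + " " + termList));
			System.out.println("Inferences are " + correctness + " for the current knowledge base.");
		}
	}
}
```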
--- .../rulewerk/core/reasoner/Reasoner.java | 41 +++++++++++++++++-- 1 file changed, 38 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 1cf1f95fa..1d0241e96 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -25,10 +25,11 @@ import java.io.IOException; import java.io.OutputStream; import java.util.List; -import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Supplier; +import java.util.stream.Stream; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; @@ -41,6 +42,7 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** @@ -151,9 +153,42 @@ default Correctness writeInferences(OutputStream stream) throws IOException { .write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); } + public class CorrectnessAndInferences { + private Correctness correctness; + private Stream inferences; + + CorrectnessAndInferences(Correctness correctness, Stream inferences) { + this.correctness = correctness; + this.inferences = inferences; + } + + public Correctness getCorrectness() { + return this.correctness; + } + + public Stream getInferences() { + return this.inferences; + } + } + + default CorrectnessAndInferences getInferences() { + Stream.Builder builder = Stream.builder(); + Correctness correctness; + try { + correctness = forEachInference( + (predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); + } catch (IOException e) { + // this will never throw. + throw new RulewerkRuntimeException("unexpected IOException", e); + } + + return new CorrectnessAndInferences(correctness, builder.build()); + } + /** - * Exports all the (explicit and implicit) facts inferred during reasoning of - * the knowledge base to a desired file. + * Exports all the (explicit and + * {@link org.omg.PortableServer.IMPLICIT_ACTIVATION_POLICY_ID}) facts inferred + * during reasoning of the knowledge base to a desired file. * * @param filePath a String of the file path for the facts to be written to. * @return the correctness of the query answers, depending on the state of the From 8701a75b0322229f0bebdfb4a7061a42c210c0d0 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 1 Apr 2020 22:29:45 +0200 Subject: [PATCH 0605/1003] Update paths for vlog-base build script --- build-vlog-library.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build-vlog-library.sh b/build-vlog-library.sh index e82a046e0..c1d915137 100755 --- a/build-vlog-library.sh +++ b/build-vlog-library.sh @@ -21,6 +21,6 @@ else cd ../../.. 
fi -mkdir -p rulewerk-core/lib -cp local_builds/jvlog.jar rulewerk-core/lib/jvlog-local.jar +mkdir -p rulewerk-vlog/lib +cp local_builds/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar mvn initialize -Pdevelopment From b41205666af6b7943f6e0b4b4d37a335421c24eb Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 15:13:13 +0200 Subject: [PATCH 0606/1003] Core: Rework handling of Correctness in forEachInference --- .../rulewerk/core/reasoner/Reasoner.java | 70 +++++++++++-------- .../rulewerk/reasoner/vlog/VLogReasoner.java | 8 ++- 2 files changed, 45 insertions(+), 33 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 1d0241e96..6eadfc90a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,6 +25,7 @@ import java.io.IOException; import java.io.OutputStream; import java.util.List; +import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Stream; @@ -82,7 +83,7 @@ public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { /** - * Factory method that to instantiate a Reasoner with an empty knowledge base. + * Factory method to instantiate a Reasoner with an empty knowledge base. * * @param makeReasoner a function that creates a Reasoner instances given a * {@link KnowledgeBase}. @@ -137,6 +138,23 @@ public interface InferenceAction { */ Correctness forEachInference(InferenceAction action) throws IOException; + /** + * Performs the given action for each inference, swallowing + * checked exceptions. + * + * @param action The action to be performed for ecah inference. + * @return the correctness of the inferences, depending on the + * state of the reasoning (materialisation) and its {@link + * KnowledgeBase}. + */ + default Correctness unsafeForEachInference(BiConsumer> action) { + try { + return forEachInference(action::accept); + } catch (IOException e) { + throw new RulewerkRuntimeException(e); + } + } + /** * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. @@ -153,38 +171,28 @@ default Correctness writeInferences(OutputStream stream) throws IOException { .write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); } - public class CorrectnessAndInferences { - private Correctness correctness; - private Stream inferences; - - CorrectnessAndInferences(Correctness correctness, Stream inferences) { - this.correctness = correctness; - this.inferences = inferences; - } - - public Correctness getCorrectness() { - return this.correctness; - } - - public Stream getInferences() { - return this.inferences; - } - } - - default CorrectnessAndInferences getInferences() { + /** + * Return a stream of all inferences. 
+ * + * @return a {@link Stream} of {@link Fact} objects corresponding + * to all inferences. + */ + default Stream getInferences() { Stream.Builder builder = Stream.builder(); - Correctness correctness; - try { - correctness = forEachInference( - (predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); - } catch (IOException e) { - // this will never throw. - throw new RulewerkRuntimeException("unexpected IOException", e); - } + unsafeForEachInference((predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); - return new CorrectnessAndInferences(correctness, builder.build()); + return builder.build(); } + /** + * Return the {@link Correctness} status of query answers. + * + * @return the correctnes of query answers, depending on the state + * of the reasoning (materialisation) and aits {@link + * KnowledgeBase}. + */ + Correctness getCorrectness(); + /** * Exports all the (explicit and * {@link org.omg.PortableServer.IMPLICIT_ACTIVATION_POLICY_ID}) facts inferred diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 9cdee5143..a2a7f1050 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -486,6 +486,10 @@ public Correctness forEachInference(InferenceAction action) throws IOException { return this.correctness; } + public Correctness getCorrectness() { + return this.correctness; + } + private void logWarningOnCorrectness() { if (this.correctness != Correctness.SOUND_AND_COMPLETE) { LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); From 1a467380d062c508601d7a6d67e53f017bbb615d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 19:19:19 +0200 Subject: [PATCH 0607/1003] Core: Move VLog-specific part of InMemoryDataSource to rulewerk-vlog --- .../rulewerk/core/reasoner/Reasoner.java | 35 +++--- .../implementation/InMemoryDataSource.java | 76 ++----------- .../InMemoryGraphAnalysisExample.java | 15 +-- rulewerk-vlog/pom.xml | 2 +- .../reasoner/vlog/VLogInMemoryDataSource.java | 107 ++++++++++++++++++ .../rulewerk/reasoner/vlog/VLogReasoner.java | 11 +- .../vlog/QueryAnsweringCorrectnessTest.java | 6 +- ...LogDataSourceConfigurationVisitorTest.java | 6 +- .../vlog/VLogReasonerWriteInferencesTest.java | 6 +- 9 files changed, 163 insertions(+), 101 deletions(-) create mode 100644 rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 6eadfc90a..48c994911 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -45,6 +45,7 @@ import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; /** * Interface that exposes the (existential) rule reasoning capabilities of a @@ -94,6 +95,17 @@ static Reasoner getInstance(Function make return getInstance(makeReasoner, KnowledgeBase::new); } + /** + * Factory method to create a suitable {@link InMemoryDataSource} with given + * arity and initial capacity. + * + * @param arity the arity for the data source. + * @param initialCapacity the initial capacity of the data source. + * + * @return an instance of an implementation of InMemoryDataSource. + */ + InMemoryDataSource makeInMemoryDataSource(final int arity, final int initialCapacity); + /** * Factory method that to instantiate a Reasoner with an empty knowledge base. * @@ -139,13 +151,11 @@ public interface InferenceAction { Correctness forEachInference(InferenceAction action) throws IOException; /** - * Performs the given action for each inference, swallowing - * checked exceptions. + * Performs the given action for each inference, swallowing checked exceptions. * * @param action The action to be performed for ecah inference. - * @return the correctness of the inferences, depending on the - * state of the reasoning (materialisation) and its {@link - * KnowledgeBase}. + * @return the correctness of the inferences, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. */ default Correctness unsafeForEachInference(BiConsumer> action) { try { @@ -174,8 +184,8 @@ default Correctness writeInferences(OutputStream stream) throws IOException { /** * Return a stream of all inferences. 
* - * @return a {@link Stream} of {@link Fact} objects corresponding - * to all inferences. + * @return a {@link Stream} of {@link Fact} objects corresponding to all + * inferences. */ default Stream getInferences() { Stream.Builder builder = Stream.builder(); @@ -185,12 +195,11 @@ default Stream getInferences() { } /** - * Return the {@link Correctness} status of query answers. - * - * @return the correctnes of query answers, depending on the state - * of the reasoning (materialisation) and aits {@link - * KnowledgeBase}. - */ + * Return the {@link Correctness} status of query answers. + * + * @return the correctnes of query answers, depending on the state of the + * reasoning (materialisation) and aits {@link KnowledgeBase}. + */ Correctness getCorrectness(); /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java index 72af91a42..bdf244ac5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,8 +20,6 @@ * #L% */ -import java.util.Arrays; - import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -35,12 +33,10 @@ * @author Markus Kroetzsch * */ -public class InMemoryDataSource implements ReasonerDataSource { +public abstract class InMemoryDataSource implements ReasonerDataSource { - String[][] data; - int nextEmptyTuple = 0; - int capacity; - final int arity; + protected int capacity; + protected final int arity; /** * Create a new in-memory data source for facts of the specified arity. The @@ -54,23 +50,6 @@ public class InMemoryDataSource implements ReasonerDataSource { public InMemoryDataSource(final int arity, final int initialCapacity) { this.capacity = initialCapacity; this.arity = arity; - this.data = new String[initialCapacity][arity]; - } - - /** - * Transforms a constant name in a format suitable for the - * reasoner. The default implementation assumes the VLog backend. - * @param constantName the name of the constant - * - * @return a transformed version of constantName that is suitable for the Reasoner. - */ - protected String transformConstantName(String constantName) { - if (!constantName.startsWith("\"") && constantName.contains(":")) { - // enclose IRIs with brackets - return "<" + constantName + ">"; - } - // it's either a datatype literal, or a relative IRI, leave it unchanged - return constantName; } /** @@ -79,51 +58,12 @@ protected String transformConstantName(String constantName) { * * @param constantNames the string names of the constants in this fact */ - public void addTuple(final String... constantNames) { + public abstract void addTuple(final String... constantNames); + + protected void validateArity(final String... 
constantNames) { if (constantNames.length != this.arity) { throw new IllegalArgumentException("This data source holds tuples of arity " + this.arity + ". Adding a tuple of size " + constantNames.length + " is not possible."); } - if (this.nextEmptyTuple == this.capacity) { - this.capacity = this.capacity * 2; - this.data = Arrays.copyOf(this.data, this.capacity); - } - this.data[this.nextEmptyTuple] = new String[this.arity]; - for (int i = 0; i < this.arity; i++) { - this.data[this.nextEmptyTuple][i] = transformConstantName(constantNames[i]); - } - this.nextEmptyTuple++; - } - - /** - * Returns the data stored in this data source, in the format expected by the - * VLog reasoner backend. - * - * @return the data - */ - public String[][] getData() { - if (this.nextEmptyTuple == this.capacity) { - return this.data; - } else { - return Arrays.copyOf(this.data, this.nextEmptyTuple); - } - } - - @Override - public String getSyntacticRepresentation() { - final StringBuilder sb = new StringBuilder( - "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); - for (int i = 0; i < getData().length; i++) { - for (int j = 0; j < this.data[i].length; j++) { - sb.append(this.data[i][j] + " "); - } - sb.append("\n"); - } - return sb.toString(); - } - - @Override - public void accept(DataSourceConfigurationVisitor visitor) { - visitor.visit(this); } } diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java index 591261fb1..838ea639e 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -27,19 +27,20 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogInMemoryDataSource; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; /** * This example shows how to reason efficiently with data sets generated in * Java. We generate a random graph with several million edges, check * connectivity, and count triangles. - * + * * Parameters can be modified to obtain graphs of different sizes and density. * It should be noted, however, that the number of triangles in reasonably dense * graphs tends to be huge, and it is easy to exhaust memory in this way. 
- * + * * @author Markus Kroetzsch * */ @@ -53,7 +54,7 @@ public static void main(final String[] args) throws ParsingException, IOExceptio final int vertexCount = 10000; final double density = 0.03; // initialise data source for storing edges (estimate how many we'll need) - final InMemoryDataSource edges = new InMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); + final InMemoryDataSource edges = new VLogInMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); int edgeCount = 0; for (int i = 1; i <= vertexCount; i++) { for (int j = 1; j <= vertexCount; j++) { @@ -64,7 +65,7 @@ public static void main(final String[] args) throws ParsingException, IOExceptio } } // also make a unary data source to mark vertices: - final InMemoryDataSource vertices = new InMemoryDataSource(1, vertexCount); + final InMemoryDataSource vertices = new VLogInMemoryDataSource(1, vertexCount); for (int i = 1; i <= vertexCount; i++) { vertices.addTuple("v" + i); } diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 7aa17b881..6fd6c8d5b 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -14,7 +14,7 @@ jar Rulewerk VLog Reasoner Support - and model + Bindings for the VLog reasoner backend. 1.3.3-snapshot diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java new file mode 100644 index 000000000..9e5fabd64 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java @@ -0,0 +1,107 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Arrays; + +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor; + +/** + * Implementation of {@link InMemoryDataSource} for the VLog backend. + */ +public class VLogInMemoryDataSource extends InMemoryDataSource { + String[][] data; + int nextEmptyTuple = 0; + + public VLogInMemoryDataSource(final int arity, final int initialCapacity) { + super(arity, initialCapacity); + this.data = new String[initialCapacity][arity]; + } + + /** + * Transforms a constant name in a format suitable for the + * reasoner. The default implementation assumes the VLog backend. + * @param constantName the name of the constant + * + * @return a transformed version of constantName that is suitable for the Reasoner. 
+ */ + protected String transformConstantName(String constantName) { + if (!constantName.startsWith("\"") && constantName.contains(":")) { + // enclose IRIs with brackets + return "<" + constantName + ">"; + } + // it's either a datatype literal, or a relative IRI, leave it unchanged + return constantName; + } + + /** + * Adds a fact to this data source. The number of constant names must agree with + * the arity of this data source. + * + * @param constantNames the string names of the constants in this fact + */ + public void addTuple(final String... constantNames) { + validateArity(constantNames); + + if (this.nextEmptyTuple == this.capacity) { + this.capacity = this.capacity * 2; + this.data = Arrays.copyOf(this.data, this.capacity); + } + this.data[this.nextEmptyTuple] = new String[this.arity]; + for (int i = 0; i < this.arity; i++) { + this.data[this.nextEmptyTuple][i] = transformConstantName(constantNames[i]); + } + this.nextEmptyTuple++; + } + + /** + * Returns the data stored in this data source, in the format expected by the + * VLog reasoner backend. + * + * @return the data + */ + public String[][] getData() { + if (this.nextEmptyTuple == this.capacity) { + return this.data; + } else { + return Arrays.copyOf(this.data, this.nextEmptyTuple); + } + } + + @Override + public String getSyntacticRepresentation() { + final StringBuilder sb = new StringBuilder( + "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); + for (int i = 0; i < getData().length; i++) { + for (int j = 0; j < this.data[i].length; j++) { + sb.append(this.data[i][j] + " "); + } + sb.append("\n"); + } + return sb.toString(); + } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index a2a7f1050..a763af891 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -111,6 +111,11 @@ public KnowledgeBase getKnowledgeBase() { return this.knowledgeBase; } + @Override + public InMemoryDataSource makeInMemoryDataSource(final int arity, final int initialCapacity) { + return new VLogInMemoryDataSource(arity, initialCapacity); + } + @Override public void setAlgorithm(final Algorithm algorithm) { Validate.notNull(algorithm, "Algorithm cannot be null!"); @@ -222,9 +227,9 @@ void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { } void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { - if (dataSource instanceof InMemoryDataSource) { + if (dataSource instanceof VLogInMemoryDataSource) { - final InMemoryDataSource inMemoryDataSource = (InMemoryDataSource) dataSource; + final VLogInMemoryDataSource inMemoryDataSource = (VLogInMemoryDataSource) dataSource; try { load(predicate, inMemoryDataSource); } catch (final EDBConfigurationException e) { @@ -233,7 +238,7 @@ void loadInMemoryDataSource(final DataSource dataSource, final Predicate predica } } - void load(final Predicate predicate, final InMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { + void load(final Predicate predicate, final VLogInMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); 
this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java index d9cc90601..583b34229 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -71,7 +71,7 @@ public class QueryAnsweringCorrectnessTest { private static final Fact factQg = Expressions.makeFact(predQ, g); private static final Fact factQh = Expressions.makeFact(predQ, h); - private static final InMemoryDataSource datasource = new InMemoryDataSource(1, 2); + private static final InMemoryDataSource datasource = new VLogInMemoryDataSource(1, 2); { datasource.addTuple("e"); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java index b4b860f58..df53efe03 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -93,7 +93,7 @@ public void visit_SparqlQueryResultDataSource_succeeds() throws MalformedURLExce @Test public void visit_InMemoryDataSource_returnsNull() { - final InMemoryDataSource inMemoryDataSource = new InMemoryDataSource(1, 1); + final InMemoryDataSource inMemoryDataSource = new VLogInMemoryDataSource(1, 1); assertEquals(null, toConfigString(inMemoryDataSource)); } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java index 5eb58262e..81835692d 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -80,7 +80,7 @@ public class VLogReasonerWriteInferencesTest { private final Fact f4 = Expressions.makeFact("country", Expressions.makeAbstractConstant("germany")); private final Fact f5 = Expressions.makeFact(universityPred, Expressions.makeAbstractConstant("tudresden"), Expressions.makeAbstractConstant("germany")); - private final InMemoryDataSource locations = new InMemoryDataSource(2, 1); + private final InMemoryDataSource locations = new VLogInMemoryDataSource(2, 1); private KnowledgeBase kb; @Before From 3a83965722ea1c0eb78eba46bb0c9187b8b2fa24 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 19:52:57 +0200 Subject: [PATCH 0608/1003] VLog: Add regression tests --- .../rulewerk/reasoner/vlog/VLogReasoner.java | 4 +- .../reasoner/vlog/VLogRegressionTest.java | 78 +++++++++++++++++++ 2 files changed, 79 insertions(+), 3 deletions(-) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index a763af891..a40e8cd0d 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -21,7 +21,6 @@ */ import java.io.IOException; -import java.io.OutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -43,7 +42,6 @@ import org.semanticweb.rulewerk.core.model.api.Statement; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; import org.semanticweb.rulewerk.core.reasoner.AcyclicityNotion; import org.semanticweb.rulewerk.core.reasoner.Algorithm; import org.semanticweb.rulewerk.core.reasoner.Correctness; @@ -56,8 +54,8 @@ import org.semanticweb.rulewerk.core.reasoner.ReasonerState; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; import org.semanticweb.rulewerk.core.reasoner.implementation.EmptyQueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java new file mode 100644 index 000000000..0bd2434c1 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java @@ -0,0 +1,78 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may 
not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; + +public class VLogRegressionTest { + @Test + public void test_issue_166() throws IOException { + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + + final Predicate A = Expressions.makePredicate("A", 1); + final Predicate B = Expressions.makePredicate("B", 1); + final Predicate C = Expressions.makePredicate("C", 1); + final Predicate R = Expressions.makePredicate("Rel", 1); + + final AbstractConstant star = Expressions.makeAbstractConstant("star"); + final AbstractConstant cy = Expressions.makeAbstractConstant("cy"); + final AbstractConstant r0 = Expressions.makeAbstractConstant("r0"); + final UniversalVariable x0 = Expressions.makeUniversalVariable("x0"); + final UniversalVariable x2 = Expressions.makeUniversalVariable("x2"); + + knowledgeBase.addStatement(Expressions.makeRule(Expressions.makePositiveLiteral(B, x2), + Expressions.makePositiveLiteral(A, x2))); + knowledgeBase.addStatement(Expressions.makeFact(B, star)); + knowledgeBase.addStatement(Expressions.makeRule(Expressions.makePositiveLiteral(R, r0), + Expressions.makePositiveLiteral(C, cy), + Expressions.makePositiveLiteral(B, x0))); + knowledgeBase.addStatement(Expressions.makeFact(C, cy)); + + try (final Reasoner reasoner = new VLogReasoner(knowledgeBase)) { + reasoner.reason(); + final QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral(R, x0), false); + assertTrue(result.hasNext()); + final QueryResult terms = result.next(); + assertFalse(result.hasNext()); + final List expectedTerms = new ArrayList(); + expectedTerms.add(r0); + assertEquals(expectedTerms, terms.getTerms()); + } + } +} From 1d111d474a4409efd6fc588c6d999893a6ee6889 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 20:04:43 +0200 Subject: [PATCH 0609/1003] Update documentation --- README.md | 5 +++-- RELEASE-NOTES.md | 5 ++++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index f950b8107..11ee4e1fc 100644 --- a/README.md +++ b/README.md @@ -27,10 +27,11 @@ You need to use Java 1.8 or above. 
Available modules include: * **rulewerk-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **rulewerk-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API * **rulewerk-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/rulewerk/wiki/Standalone-client) for Rulewerk. +* **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released packages use rulewerk-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use vlog-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: -* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-core/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of rulewerk-base. +* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog-base. * Run ```mvn install``` to test if the setup works diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index d0408e336..da1594e58 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -15,11 +15,14 @@ Breaking changes: changing several import paths. `Reasoner.getInstance()` now takes a mandatory argument, a function taking a `KnowledgeBase` and returning a `Reasoner` instance. Previous behaviour can be obtained - by using `Reasoner.getInstance(VLogReasoner::new)`. + by using `Reasoner.getInstance(VLogReasoner::new)`. As a result, + InMemoryDataSource has become an abstract class, use + VLogInMemoryDataSource where applicable. 
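(Editorial note, not part of the release notes: a minimal migration sketch for the breaking change above. The constructor arguments and `addTuple` calls mirror the `InMemoryGraphAnalysisExample` changes earlier in this series; the predicate wiring via a data source declaration is unchanged and therefore omitted.)

```java
import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource;
import org.semanticweb.rulewerk.reasoner.vlog.VLogInMemoryDataSource;

public class InMemoryDataSourceMigration {
	public static void main(final String[] args) {
		// previously: new InMemoryDataSource(2, 1000) -- no longer possible, the class is now abstract
		final InMemoryDataSource edges = new VLogInMemoryDataSource(2, 1000); // arity 2, initial capacity 1000
		edges.addTuple("v1", "v2"); // tuples are added exactly as before
		edges.addTuple("v2", "v3");
		// the data source is then attached to a predicate through a data source
		// declaration in the knowledge base, as in previous releases
	}
}
```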
New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` * All inferred facts can be serialized to a file using `Reasoner.writeInferences()` +* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()` * Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` * Rules files may import other rules files using `@import` and `@import-relative`, where the latter resolves relative IRIs using From 4001a34945a592820546178919285fecdffa49e2 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 21:39:40 +0200 Subject: [PATCH 0610/1003] VLog: Add another test case --- .../reasoner/vlog/VLogRegressionTest.java | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java index 0bd2434c1..4aa92c75b 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java @@ -75,4 +75,35 @@ public void test_issue_166() throws IOException { assertEquals(expectedTerms, terms.getTerms()); } } + + @Test + public void test_vlog_issue_44() throws IOException { + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + + final Predicate P = Expressions.makePredicate("P", 1); + final Predicate Q = Expressions.makePredicate("Q", 1); + final Predicate R = Expressions.makePredicate("R", 1); + + final AbstractConstant c = Expressions.makeAbstractConstant("c"); + final AbstractConstant d = Expressions.makeAbstractConstant("d"); + final UniversalVariable x = Expressions.makeUniversalVariable("x"); + + knowledgeBase.addStatement(Expressions.makeFact(P, c)); + knowledgeBase.addStatement(Expressions.makeFact(Q, d)); + knowledgeBase.addStatement(Expressions.makeRule(Expressions.makePositiveLiteral(R, x), + Expressions.makePositiveLiteral(P, x), + Expressions.makeNegativeLiteral(Q, x))); + + try (final Reasoner reasoner = new VLogReasoner(knowledgeBase)) { + reasoner.reason(); + final QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral(R, x), false); + assertTrue(result.hasNext()); + final QueryResult terms = result.next(); + assertFalse(result.hasNext()); + final List expectedTerms = new ArrayList(); + expectedTerms.add(c); + assertEquals(expectedTerms, terms.getTerms()); + assertFalse(result.hasNext()); + } + } } From 8ec7ab9403320bd7dba1dc4f796cb441d84fd407 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Sun, 17 May 2020 15:07:32 +0200 Subject: [PATCH 0611/1003] VLog: Fix formatting --- .../rulewerk/reasoner/vlog/VLogRegressionTest.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java index 4aa92c75b..d79fceab3 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java @@ -1,8 +1,5 @@ package org.semanticweb.rulewerk.reasoner.vlog; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - /* * #%L * Rulewerk VLog Reasoner Support @@ -24,6 +21,8 @@ */ import static 
org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import java.io.IOException; import java.util.ArrayList; From 0f2fccbbe756952acdf6873fc5f585789b04d31b Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 18 May 2020 21:25:11 +0200 Subject: [PATCH 0612/1003] Address review comments --- RELEASE-NOTES.md | 9 ++-- .../rulewerk/core/reasoner/Reasoner.java | 49 ++----------------- .../implementation/CsvFileDataSource.java | 6 +-- .../DataSourceConfigurationVisitor.java | 38 ++++++++++++-- .../implementation/FileDataSource.java | 31 ++---------- .../implementation/RdfFileDataSource.java | 6 +-- .../implementation/ReasonerDataSource.java | 8 +-- .../SparqlQueryResultDataSource.java | 19 +------ .../core/AddDataFromSparqlQueryResults.java | 6 +-- .../core/ConfigureReasonerLogging.java | 6 +-- .../examples/graal/AddDataFromDlgpFile.java | 6 +-- .../VLogDataSourceConfigurationVisitor.java | 27 +++++++--- .../reasoner/vlog/VLogKnowledgeBase.java | 12 +++-- .../rulewerk/reasoner/vlog/VLogReasoner.java | 5 -- .../reasoner/vlog/CsvFileDataSourceTest.java | 25 ++-------- .../vlog/FileDataSourceTestUtils.java | 5 +- .../rulewerk/reasoner/vlog/LoggingTest.java | 6 +-- .../reasoner/vlog/RdfFileDataSourceTest.java | 10 ++-- .../reasoner/vlog/ReasonerTimeoutTest.java | 6 +-- ...LogDataSourceConfigurationVisitorTest.java | 27 +++++++--- .../reasoner/vlog/VLogReasonerBasics.java | 8 +-- .../reasoner/vlog/VLogReasonerStateTest.java | 16 +++--- 22 files changed, 145 insertions(+), 186 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index da1594e58..9f52a1d57 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -12,12 +12,9 @@ Breaking changes: * The `FileDataSource` constructor and those of derived classes now take the path to a file instead of `File` object. * The VLog backend has been moved to a new `rulewerk-vlog` module, - changing several import paths. `Reasoner.getInstance()` now takes a - mandatory argument, a function taking a `KnowledgeBase` and - returning a `Reasoner` instance. Previous behaviour can be obtained - by using `Reasoner.getInstance(VLogReasoner::new)`. As a result, - InMemoryDataSource has become an abstract class, use - VLogInMemoryDataSource where applicable. + changing several import paths. `Reasoner.getInstance()` is + gone. Furthermore, InMemoryDataSource has become an abstract class, + use VLogInMemoryDataSource where applicable. New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 48c994911..dcc3cea7b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -82,46 +82,6 @@ */ public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { - - /** - * Factory method to instantiate a Reasoner with an empty knowledge base. - * - * @param makeReasoner a function that creates a Reasoner instances given a - * {@link KnowledgeBase}. - * - * @return a {@link Reasoner} instance. - */ - static Reasoner getInstance(Function makeReasoner) { - return getInstance(makeReasoner, KnowledgeBase::new); - } - - /** - * Factory method to create a suitable {@link InMemoryDataSource} with given - * arity and initial capacity. 
- * - * @param arity the arity for the data source. - * @param initialCapacity the initial capacity of the data source. - * - * @return an instance of an implementation of InMemoryDataSource. - */ - InMemoryDataSource makeInMemoryDataSource(final int arity, final int initialCapacity); - - /** - * Factory method that to instantiate a Reasoner with an empty knowledge base. - * - * @param makeReasoner a function that creates a Reasoner instances given a - * {@link KnowledgeBase}. - * @param makeKnowledgeBase a function that creates a {@link KnowledgeBase} - * instance. - * - * @return a {@link Reasoner} instance. - */ - static Reasoner getInstance(Function makeReasoner, - Supplier makeKnowledgeBase) { - final KnowledgeBase knowledgeBase = makeKnowledgeBase.get(); - return makeReasoner.apply(knowledgeBase); - } - /** * Getter for the knowledge base to reason on. * @@ -153,7 +113,7 @@ public interface InferenceAction { /** * Performs the given action for each inference, swallowing checked exceptions. * - * @param action The action to be performed for ecah inference. + * @param action The action to be performed for each inference. * @return the correctness of the inferences, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. */ @@ -198,14 +158,13 @@ default Stream getInferences() { * Return the {@link Correctness} status of query answers. * * @return the correctnes of query answers, depending on the state of the - * reasoning (materialisation) and aits {@link KnowledgeBase}. + * reasoning (materialisation) and its {@link KnowledgeBase}. */ Correctness getCorrectness(); /** - * Exports all the (explicit and - * {@link org.omg.PortableServer.IMPLICIT_ACTIVATION_POLICY_ID}) facts inferred - * during reasoning of the knowledge base to a desired file. + * Exports all the (explicit and implicit) facts inferred during + * reasoning of the knowledge base to a desired file. * * @param filePath a String of the file path for the facts to be written to. * @return the correctness of the query answers, depending on the state of the diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index 3ee0a4574..7f72f25de 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -78,7 +78,7 @@ public String getSyntacticRepresentation() { } @Override - public void accept(DataSourceConfigurationVisitor visitor) { + public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java index 82b3d11de..9ead436c5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,12 +20,42 @@ * #L% */ +import java.io.IOException; + +/** + * A visitor to generate (reasoner-specific) configuration for the various data + * sources. + * + * @author Maximilian Marx + */ public interface DataSourceConfigurationVisitor { - public void visit(CsvFileDataSource dataSource); + /** + * Configure the reasoner for a {@link CsvFileDataSource}. + * + * @param dataSource the data source to configure. + * @throws IOexception when an IO error occurs during configuration. + */ + public void visit(CsvFileDataSource dataSource) throws IOException; - public void visit(RdfFileDataSource dataSource); + /** + * Configure the reasoner for a {@link RdfFileDataSource}. + * + * @param dataSource the data source to configure. + * @throws IOexception when an IO error occurs during configuration. + */ + public void visit(RdfFileDataSource dataSource) throws IOException; + /** + * Configure the reasoner for a {@link SparqlQueryResultDataSource}. + * + * @param dataSource the data source to configure. + */ public void visit(SparqlQueryResultDataSource dataSource); + /** + * Configure the reasoner for a {@link InMemoryDataSource}. + * + * @param dataSource the data source to configure. + */ public void visit(InMemoryDataSource dataSource); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index cbdb10e61..46ec295de 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,7 +22,6 @@ import java.io.File; import java.io.IOException; -import java.nio.file.Paths; import java.util.Optional; import java.util.stream.Stream; import java.util.stream.StreamSupport; @@ -43,11 +42,6 @@ public abstract class FileDataSource implements ReasonerDataSource { private final String filePath; private final String fileName; private final String extension; - /** - * The canonical path to the parent directory where the file resides. - */ - private final String dirCanonicalPath; - private final String fileNameWithoutExtension; /** * Constructor. @@ -68,8 +62,7 @@ public FileDataSource(final String filePath, final Iterable possibleExte this.filePath = filePath; // unmodified file path, necessary for correct serialisation this.fileName = this.file.getName(); this.extension = getValidExtension(this.fileName, possibleExtensions); - this.fileNameWithoutExtension = this.fileName.substring(0, this.fileName.lastIndexOf(this.extension)); - this.dirCanonicalPath = Paths.get(file.getCanonicalPath()).getParent().toString(); + file.getCanonicalPath(); // make sure that the path is valid. } private String getValidExtension(final String fileName, final Iterable possibleExtensions) { @@ -96,22 +89,8 @@ public String getName() { return this.fileName; } - /** - * Canonicalise the file path - * - * @return The canonical path to the parent directory where the file resides. - */ - public String getDirCanonicalPath() { - return this.dirCanonicalPath; - } - - /** - * Get the base name of the file, without an extension. - * - * @return the file basename without any extension. - */ - public String getFileNameWithoutExtension() { - return this.fileNameWithoutExtension; + public String getExtension() { + return this.extension; } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index 29a3f327f..9df6c5d26 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -82,7 +82,7 @@ public Optional getRequiredArity() { } @Override - public void accept(DataSourceConfigurationVisitor visitor) { + public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java index 57c6e1dee..96020fceb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,6 +20,8 @@ * #L% */ +import java.io.IOException; + import org.semanticweb.rulewerk.core.model.api.DataSource; /** @@ -32,5 +34,5 @@ public interface ReasonerDataSource extends DataSource { * * @param visitor the visitor. */ - public void accept(DataSourceConfigurationVisitor visitor); + public void accept(DataSourceConfigurationVisitor visitor) throws IOException; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index cdc8723d2..99f8548e0 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -108,21 +108,6 @@ public String getQueryVariables() { return this.queryVariables; } - // @Override - // public final String toConfigString() { - // final String configStringPattern = - - // PREDICATE_NAME_CONFIG_LINE + - - // DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - - // "EDB%1$d_param0=" + this.endpoint + "\n" + "EDB%1$d_param1=" + this.queryVariables + "\n" + - - // "EDB%1$d_param2=" + this.queryBody + "\n"; - - // return configStringPattern; - // } - static String getQueryVariablesList(LinkedHashSet queryVariables) { final StringBuilder sb = new StringBuilder(); final Iterator iterator = queryVariables.iterator(); diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java index 2f74a96fa..b32c784ea 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -121,7 +121,7 @@ public static void main(final String[] args) throws IOException { */ final Predicate queryPredicate = Expressions.makePredicate("publicationParents", 3); - try (Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java index 902fbf543..6eeb04d60 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -66,7 +66,7 @@ public class ConfigureReasonerLogging { public static void main(final String[] args) throws IOException, ParsingException { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* exists z. B(?y, !z) :- A(?x, ?y) . 
*/ kb.addStatements(RuleParser.parseRule("B(?Y, !Z) :- A(?X, ?Y) .")); diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java index 1f9621bd8..14e0116ba 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -102,7 +102,7 @@ public static void main(final String[] args) throws IOException { * the reasoner automatically. */ - try (Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java index dd4ac05f9..f5396dc24 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,8 +20,12 @@ * #L% */ +import java.io.IOException; +import java.nio.file.Paths; + import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor; import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; + import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -39,21 +43,30 @@ public String getConfigString() { return configString; } - protected void setFileConfigString(FileDataSource dataSource) { + protected void setFileConfigString(FileDataSource dataSource) throws IOException { this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" + FILE_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - "EDB%1$d_param0=" + dataSource.getDirCanonicalPath() + "\n" + - "EDB%1$d_param1=" + dataSource.getFileNameWithoutExtension() + "\n"; + "EDB%1$d_param0=" + getDirCanonicalPath(dataSource) + "\n" + + "EDB%1$d_param1=" + getFileNameWithoutExtension(dataSource) + "\n"; + } + + String getDirCanonicalPath(FileDataSource dataSource) throws IOException { + return Paths.get(dataSource.getFile().getCanonicalPath()).getParent().toString(); + } + + String getFileNameWithoutExtension(FileDataSource dataSource) { + final String fileName = dataSource.getName(); + return fileName.substring(0, fileName.lastIndexOf(dataSource.getExtension())); } @Override - public void visit(CsvFileDataSource dataSource) { + public void visit(CsvFileDataSource dataSource) throws IOException { setFileConfigString(dataSource); } @Override - public void visit(RdfFileDataSource dataSource) { + public void visit(RdfFileDataSource dataSource) throws IOException { setFileConfigString(dataSource); } diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java index 3bd57a52f..6af5a9c39 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,6 +20,7 @@ * #L% */ +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Formatter; @@ -30,6 +31,7 @@ import java.util.Map.Entry; import java.util.Set; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -124,7 +126,11 @@ int addDataSourceConfigurationString(final DataSource dataSource, final Predicat if (dataSource instanceof ReasonerDataSource) { final ReasonerDataSource reasonerDataSource = (ReasonerDataSource) dataSource; final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); - reasonerDataSource.accept(visitor); + try { + reasonerDataSource.accept(visitor); + } catch (IOException e) { + throw new RulewerkRuntimeException("Error while building VLog data source configuration", e); + } final String configString = visitor.getConfigString(); if (configString != null) { formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index a40e8cd0d..695be1fca 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -109,11 +109,6 @@ public KnowledgeBase getKnowledgeBase() { return this.knowledgeBase; } - @Override - public InMemoryDataSource makeInMemoryDataSource(final int arity, final int initialCapacity) { - return new VLogInMemoryDataSource(arity, initialCapacity); - } - @Override public void setAlgorithm(final Algorithm algorithm) { Validate.notNull(algorithm, "Algorithm cannot be null!"); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java index d85f73d09..593993f73 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -52,24 +52,7 @@ public void testConstructor() throws IOException { final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); - FileDataSourceTestUtils.testConstructor(unzippedCsvFileDataSource, new File(csvFile).getName(), dirCanonicalPath, "file"); - FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, new File(gzFile).getName(), dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(unzippedCsvFileDataSource, new File(csvFile).getName()); + FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, new File(gzFile).getName()); } - - @Test - public void testNoParentDir() throws IOException { - final FileDataSource fileDataSource = new CsvFileDataSource("file.csv"); - final String dirCanonicalPath = fileDataSource.getDirCanonicalPath(); - final String currentFolder = new File(".").getCanonicalPath(); - assertEquals(currentFolder, dirCanonicalPath); - } - - @Test - public void testNotNormalisedParentDir() throws IOException { - final FileDataSource fileDataSource = new CsvFileDataSource("./././file.csv"); - final String dirCanonicalPath = fileDataSource.getDirCanonicalPath(); - final String currentFolder = new File(".").getCanonicalPath(); - assertEquals(currentFolder, dirCanonicalPath); - } - } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java index 6a0819ed3..73128188c 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java @@ -91,11 +91,8 @@ public static List> getCSVContent(final String csvFile) throws IOEx return content; } - public static void testConstructor(final FileDataSource fileDataSource, final String expectedFileName, - final String expectedDirCanonicalPath, final String expectedFileNameWithoutExtension) throws IOException { + public static void testConstructor(final FileDataSource fileDataSource, final String expectedFileName) throws IOException { assertEquals(expectedFileName, fileDataSource.getName()); - assertEquals(expectedDirCanonicalPath, fileDataSource.getDirCanonicalPath()); - assertEquals(expectedFileNameWithoutExtension, fileDataSource.getFileNameWithoutExtension()); } public static void testLoadEmptyFile(final Predicate predicate, final PositiveLiteral queryAtom, diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java index 598ea90c9..57691fc99 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -109,7 +109,7 @@ public void testSetLogFileInexistent() throws IOException { @Test(expected = NullPointerException.class) public void testSetLogLevelNull() { - try (final Reasoner instance = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner instance = new VLogReasoner(new KnowledgeBase())) { instance.setLogLevel(null); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java index f16ab06f2..0ab0a7989 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -47,12 +47,10 @@ public void testConstructorFalseExtension() throws IOException { @Test public void testConstructor() throws IOException { - - final String dirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); final RdfFileDataSource zippedRdfFileDataSource = new RdfFileDataSource(zippedRdfFile); - FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, new File(unzippedRdfFile).getName(), dirCanonicalPath, "file"); - FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, new File(zippedRdfFile).getName(), dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, new File(unzippedRdfFile).getName()); + FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, new File(zippedRdfFile).getName()); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java index 415e03ccf..5ec62cfbb 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -119,7 +119,7 @@ public void setUp() { @Test(expected = IllegalArgumentException.class) public void testSetReasoningTimeout() { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.setReasoningTimeout(-3); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java index df53efe03..c11752655 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java @@ -30,14 +30,13 @@ import java.util.LinkedHashSet; import org.junit.Test; - import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.ReasonerDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class VLogDataSourceConfigurationVisitorTest { @@ -77,7 +76,7 @@ public void visit_RdfFileDataSource_succeeds() throws IOException { } @Test - public void visit_SparqlQueryResultDataSource_succeeds() throws MalformedURLException { + public void visit_SparqlQueryResultDataSource_succeeds() throws IOException, MalformedURLException { final SparqlQueryResultDataSource simpleDataSource = new SparqlQueryResultDataSource(endpoint, "b,a", "?a wdt:P22 ?b"); final LinkedHashSet queryVariables = new LinkedHashSet<>( @@ -92,12 +91,28 @@ public void visit_SparqlQueryResultDataSource_succeeds() throws MalformedURLExce } @Test - public void visit_InMemoryDataSource_returnsNull() { + public void visit_InMemoryDataSource_returnsNull() throws IOException { final InMemoryDataSource inMemoryDataSource = new VLogInMemoryDataSource(1, 1); assertEquals(null, toConfigString(inMemoryDataSource)); } - private String toConfigString(ReasonerDataSource dataSource) { + @Test + public void getDirCanonicalPath_relativePath_succeeds() throws IOException { + final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); + final FileDataSource fileDataSource = new CsvFileDataSource("file.csv"); + final String currentFolder = new File(".").getCanonicalPath(); + assertEquals(currentFolder, visitor.getDirCanonicalPath(fileDataSource)); + } + + @Test + public void getDirCanonicalPath_nonNormalisedPath_succeeds() throws IOException { + final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); + final FileDataSource fileDataSource = new CsvFileDataSource("./././file.csv"); + final String currentFolder = new File(".").getCanonicalPath(); + assertEquals(currentFolder, visitor.getDirCanonicalPath(fileDataSource)); + } + + private String toConfigString(ReasonerDataSource dataSource) throws IOException { 
VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); dataSource.accept(visitor); return visitor.getConfigString(); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java index ca6a3bed4..8e2b07f57 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java @@ -12,9 +12,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -59,14 +59,14 @@ public class VLogReasonerBasics { @Test(expected = NullPointerException.class) public void testSetAlgorithmNull() { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.setAlgorithm(null); } } @Test(expected = NullPointerException.class) public void setRuleRewriteStrategy1() { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.setRuleRewriteStrategy(null); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java index 17f3dc8cc..0ea81efbb 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -68,21 +68,21 @@ public class VLogReasonerStateTest { @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.answerQuery(exampleQueryAtom, true); } } @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswersBeforeLoad() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); } } @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryAfterReset() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.reason(); reasoner.resetReasoner(); reasoner.answerQuery(exampleQueryAtom, true); @@ -91,7 +91,7 @@ public void testFailAnswerQueryAfterReset() throws IOException { @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswersAfterReset() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.reason(); reasoner.resetReasoner(); reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); @@ -172,7 +172,7 @@ public void testAddFacts2() throws IOException { @Test public void testResetBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.resetReasoner(); } } @@ -280,7 +280,7 @@ public void testResetEmptyKnowledgeBase() throws IOException { @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswerToCsvBeforeLoad() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); } From f9e5dad35cb9e0a400fa1a88ca2358c0915e6741 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 19 May 2020 10:04:31 +0200 Subject: [PATCH 0613/1003] Enable deprecation warnings --- pom.xml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pom.xml b/pom.xml index bbe69eb09..5cff39168 100644 --- a/pom.xml +++ b/pom.xml @@ -228,6 +228,9 @@ 1.8 1.8 + + -Xlint:deprecation +
    From 0d8a7ec62993da766bbc193b33a3a29e02d01ec3 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 19 May 2020 10:04:48 +0200 Subject: [PATCH 0614/1003] VLog: Add more tests --- .../reasoner/vlog/VLogQueryResultUtils.java | 24 ++++++-- .../vlog/VLogReasonerWriteInferencesTest.java | 59 +++++++++++++++++++ 2 files changed, 79 insertions(+), 4 deletions(-) diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java index d192add94..782123d7c 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,10 +22,14 @@ import static org.junit.Assert.assertTrue; +import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.function.BiConsumer; + +import org.semanticweb.rulewerk.core.model.api.Predicate; import karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; @@ -33,7 +37,7 @@ /** * Utility class with static methods used for collecting query results for * testing purposes. - * + * * @author Irina Dragoste * */ @@ -46,7 +50,7 @@ private VLogQueryResultUtils() { * Collects TermQueryResultIterator results into a Set. Transforms the array of * {@link Term}s into a set of {@link Term}s. Asserts that the results do not * contain duplicates. Closes the iterator after collecting the results. - * + * * @param queryResultIterator * @return a set of unique query result. A query result is a List of Term * tuples. @@ -61,4 +65,16 @@ static Set> collectResults(final TermQueryResultIterator queryResultI return answers; } + @SuppressWarnings("unchecked") + private static void sneakyThrow(Throwable e) throws E { + throw (E) e; + } + + /** + * Throw an {@link IOException}, uncheckedly. Needed for testing + * {@link VLogReasoner#unsafeForEachInference}. 
+ */ + static void sneakilyThrowIOException() { + sneakyThrow(new IOException()); + } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java index 81835692d..e7342762d 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -25,16 +25,20 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.BiConsumer; import java.util.stream.Collectors; import java.util.stream.Stream; import org.junit.Before; import org.junit.Test; + import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.Conjunction; import org.semanticweb.rulewerk.core.model.api.Constant; @@ -43,11 +47,14 @@ import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.UniversalVariable; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.Reasoner.InferenceAction; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; public class VLogReasonerWriteInferencesTest { @@ -123,6 +130,58 @@ public void writeInferences_withBase_writesBase() throws IOException, PrefixDecl assertTrue("the base declaration is present", getInferences().contains("@base .")); } + @Test + public void getInferences_example_succeeds() throws IOException { + final List inferences = getInferences(); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + final List fromStream = reasoner.getInferences() + .map(Fact::getSyntacticRepresentation) + .collect(Collectors.toList()); + assertEquals(inferences, fromStream); + } + } + + @Test + public void unsafeForEachInference_example_succeeds() throws IOException { + final List inferences = getInferences(); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + final List fromUnsafe = new ArrayList<>(); + + reasoner.unsafeForEachInference((Predicate, terms) -> { + fromUnsafe.add(Expressions.makeFact(Predicate, terms).getSyntacticRepresentation()); + }); + + assertEquals(inferences, fromUnsafe); + } + } + + @Test(expected = IOException.class) + public void forEachInference_throwingAction_throws() throws IOException { + InferenceAction action = mock(InferenceAction.class); + doThrow(IOException.class).when(action).accept(any(Predicate.class), anyList()); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + reasoner.forEachInference(action); + } + } + + private class 
ThrowingConsumer implements BiConsumer> { + @Override + public void accept(Predicate predicate, List terms) { + VLogQueryResultUtils.sneakilyThrowIOException(); + } + } + + @Test(expected = RulewerkRuntimeException.class) + public void unsafeForEachInference_throwingAction_throws() throws IOException { + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + reasoner.unsafeForEachInference(new ThrowingConsumer()); + } + } + private List getInferences() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); From 5a8e1eefa89f34caa53789b9ee5ee795a444cff0 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 19 May 2020 18:24:49 +0200 Subject: [PATCH 0615/1003] Address Review Comments --- .../rulewerk/core/reasoner/Reasoner.java | 3 -- .../CsvFileDataSourceTest.java | 8 +-- .../FileDataSourceTestUtils.java | 52 +++++++++++++++++++ .../RdfFileDataSourceTest.java | 7 +-- .../SparqlQueryResultDataSourceTest.java | 10 ++-- .../rulewerk/parser/DirectiveHandler.java | 6 +-- .../ImportFileRelativeDirectiveHandler.java | 5 +- .../parser/RuleParserParseFactTest.java | 11 ++-- .../rulewerk/reasoner/vlog/VLogReasoner.java | 4 +- .../reasoner/vlog/VLogToModelConverter.java | 23 ++++---- .../reasoner/vlog/VLogQueryResultUtils.java | 3 -- .../vlog/VLogReasonerWriteInferencesTest.java | 6 +-- 12 files changed, 79 insertions(+), 59 deletions(-) rename {rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog => rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation}/CsvFileDataSourceTest.java (82%) create mode 100644 rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java rename {rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog => rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation}/RdfFileDataSourceTest.java (87%) rename {rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog => rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation}/SparqlQueryResultDataSourceTest.java (86%) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index dcc3cea7b..276398c62 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -26,8 +26,6 @@ import java.io.OutputStream; import java.util.List; import java.util.function.BiConsumer; -import java.util.function.Function; -import java.util.function.Supplier; import java.util.stream.Stream; import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; @@ -45,7 +43,6 @@ import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.model.implementation.Serializer; -import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; /** * Interface that exposes the (existential) rule reasoning capabilities of a diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java similarity index 82% rename from rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java rename to 
rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java index 593993f73..119e8057d 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -20,16 +20,11 @@ * #L% */ -import static org.junit.Assert.assertEquals; - import java.io.File; import java.io.IOException; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; - public class CsvFileDataSourceTest { private final String ntFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"; @@ -48,7 +43,6 @@ public void testConstructorFalseExtension() throws IOException { @Test public void testConstructor() throws IOException { - final String dirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java new file mode 100644 index 000000000..057ceba16 --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -0,0 +1,52 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +/** + * Utility class for reading from and writing to data source files. + * + * @author Christian Lewe + * @author Irina Dragoste + * + */ +public final class FileDataSourceTestUtils { + + public static final String INPUT_FOLDER = "src/test/data/input/"; + + /* + * This is a utility class. Therefore, it is best practice to do the following: + * (1) Make the class final, (2) make its constructor private, (3) make all its + * fields and methods static. This prevents the classes instantiation and + * inheritance. 
+ */ + private FileDataSourceTestUtils() { + + } + + public static void testConstructor(final FileDataSource fileDataSource, final String expectedFileName) + throws IOException { + assertEquals(expectedFileName, fileDataSource.getName()); + } +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java similarity index 87% rename from rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java index 0ab0a7989..3bb39bc8d 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java @@ -1,6 +1,4 @@ -package org.semanticweb.rulewerk.reasoner.vlog; - -import static org.junit.Assert.assertEquals; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -27,9 +25,6 @@ import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; - public class RdfFileDataSourceTest { private final String unzippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"; diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java similarity index 86% rename from rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java index 81f6aaa92..6e99a539f 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,8 +20,6 @@ * #L% */ -import static org.junit.Assert.assertEquals; - import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; @@ -32,8 +30,6 @@ import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class SparqlQueryResultDataSourceTest { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index ae6c25251..76c75e716 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,13 +21,11 @@ */ import java.io.File; -import java.io.IOException; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.nio.file.InvalidPathException; import java.util.List; -import java.util.NoSuchElementException; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index 1ef7a4372..7de06a3ea 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -26,7 +26,6 @@ import java.util.List; import org.semanticweb.rulewerk.core.exceptions.RulewerkException; -import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.parser.DirectiveArgument; diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java index dee7199c1..d39446d31 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,20 +20,15 @@ * #L% */ -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Literal; -import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; -import org.semanticweb.rulewerk.parser.ParserConfiguration; -import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.RuleParser; public class RuleParserParseFactTest implements ParserTestUtils { diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 695be1fca..ea35c37f8 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -54,7 +54,6 @@ import org.semanticweb.rulewerk.core.reasoner.ReasonerState; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; import org.semanticweb.rulewerk.core.reasoner.implementation.EmptyQueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -231,7 +230,8 @@ void loadInMemoryDataSource(final DataSource dataSource, final Predicate predica } } - void load(final Predicate predicate, final VLogInMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { + void load(final Predicate predicate, final VLogInMemoryDataSource inMemoryDataSource) + throws EDBConfigurationException { final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); diff --git 
a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index 581e13368..c68d68234 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,7 +31,6 @@ import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; import org.semanticweb.rulewerk.core.model.implementation.Serializer; -import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; /** @@ -84,15 +83,15 @@ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { static Term toTerm(karmaresearch.vlog.Term vLogTerm) { final String name = vLogTerm.getName(); switch (vLogTerm.getTermType()) { - case CONSTANT: - return toConstant(name); - case BLANK: - return new NamedNullImpl(name); - case VARIABLE: - throw new IllegalArgumentException( - "VLog variables cannot be converted without knowing if they are universally or existentially quantified."); - default: - throw new IllegalArgumentException("Unexpected VLog term type: " + vLogTerm.getTermType()); + case CONSTANT: + return toConstant(name); + case BLANK: + return new NamedNullImpl(name); + case VARIABLE: + throw new IllegalArgumentException( + "VLog variables cannot be converted without knowing if they are universally or existentially quantified."); + default: + throw new IllegalArgumentException("Unexpected VLog term type: " + vLogTerm.getTermType()); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java index 782123d7c..8f3d65308 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java @@ -27,9 +27,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import java.util.function.BiConsumer; - -import org.semanticweb.rulewerk.core.model.api.Predicate; import karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java index e7342762d..19365056e 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -51,7 +51,6 @@ import org.semanticweb.rulewerk.core.model.api.UniversalVariable; import 
org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.Reasoner.InferenceAction; @@ -135,9 +134,8 @@ public void getInferences_example_succeeds() throws IOException { final List inferences = getInferences(); try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final List fromStream = reasoner.getInferences() - .map(Fact::getSyntacticRepresentation) - .collect(Collectors.toList()); + final List fromStream = reasoner.getInferences().map(Fact::getSyntacticRepresentation) + .collect(Collectors.toList()); assertEquals(inferences, fromStream); } } From 38a49793c88d1deda382b9d61b00d767bc175040 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 19 May 2020 19:52:17 +0200 Subject: [PATCH 0616/1003] rename vlog jar from vlog-base to vlog-java --- rulewerk-vlog/pom.xml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 6fd6c8d5b..d8438ed7d 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -18,6 +18,7 @@ 1.3.3-snapshot + vlog-java @@ -29,7 +30,7 @@ ${project.groupId} - vlog-base + ${karmaresearch.vlog.artifactId} ${karmaresearch.vlog.version} @@ -55,7 +56,7 @@ ${project.groupId} - vlog-base + ${karmaresearch.vlog.artifactId} ${karmaresearch.vlog.version} jar ./lib/jvlog-local.jar From 82551d63ad56b9e4a45c69236c1ac7d039aa82c3 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 20 May 2020 11:44:03 +0200 Subject: [PATCH 0617/1003] VLog: Fix detection of data statements during loading Fixes #173. --- .../reasoner/vlog/VLogKnowledgeBase.java | 2 +- .../reasoner/vlog/VLogKnowledgeBaseTest.java | 75 +++++++++++++++++++ 2 files changed, 76 insertions(+), 1 deletion(-) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java index 6af5a9c39..9f41cc7c9 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java @@ -84,7 +84,7 @@ public class VLogKnowledgeBase { } boolean hasData() { - return !this.edbPredicates.isEmpty() && !this.aliasedEdbPredicates.isEmpty(); + return !this.edbPredicates.isEmpty() || !this.aliasedEdbPredicates.isEmpty(); } public boolean hasRules() { diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java new file mode 100644 index 000000000..81ccbc147 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java @@ -0,0 +1,75 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; + +public class VLogKnowledgeBaseTest { + private KnowledgeBase knowledgeBase = new KnowledgeBase(); + private Predicate p = Expressions.makePredicate("P", 1); + private Predicate q = Expressions.makePredicate("Q", 1); + private UniversalVariable x = Expressions.makeUniversalVariable("x"); + private AbstractConstant c = Expressions.makeAbstractConstant("c"); + private Fact fact = Expressions.makeFact(p, c); + private PositiveLiteral literal = Expressions.makePositiveLiteral(p, x); + private Rule rule = Expressions.makeRule(literal, literal); + + @Test + public void hasData_noData_returnsFalse() { + VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase); + assertFalse(vKB.hasData()); + } + + @Test + public void hasData_noAliasedPredicates_returnsTrue() { + knowledgeBase.addStatement(fact); + VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase); + assertTrue(vKB.hasData()); + } + + @Test + public void hasData_onlyAliasedPredicates_returnsTrue() { + knowledgeBase.addStatement(rule); + knowledgeBase.addStatement(fact); + VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase); + assertTrue(vKB.hasData()); + } + + @Test + public void hasData_bothUnaliasedAndAliasedPredicates_returnsTrue() { + knowledgeBase.addStatement(Expressions.makeFact(q, c)); + knowledgeBase.addStatement(rule); + knowledgeBase.addStatement(fact); + VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase); + assertTrue(vKB.hasData()); + } +} From a71d91464160dedd7cd32b9a3cc5a06dabcc8ae7 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 20 May 2020 11:51:36 +0200 Subject: [PATCH 0618/1003] added developers Larry and Ali --- pom.xml | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 5cff39168..391c9de20 100644 --- a/pom.xml +++ b/pom.xml @@ -20,10 +20,10 @@ rulewerk-core rulewerk-vlog rulewerk-rdf - rulewerk-examples rulewerk-owlapi rulewerk-graal rulewerk-parser + rulewerk-examples rulewerk-client coverage @@ -63,6 +63,16 @@ Maximilian Marx maximilian.marx@tu-dresden.de + + larry + Larry González + larry.gonzalez@tu-dresden.de + + + ali + Ali Elhalawati + ali.elhalawati@tu-dresden.de + From 0e34e3a505175e7ebd915b035bd29ee2292ce0b7 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 20 May 2020 12:30:59 +0200 Subject: [PATCH 0619/1003] VLog: Avoid duplicated code --- .../reasoner/vlog/VLogInMemoryDataSource.java | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git 
a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java index 9e5fabd64..c6952d09f 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java @@ -37,22 +37,6 @@ public VLogInMemoryDataSource(final int arity, final int initialCapacity) { this.data = new String[initialCapacity][arity]; } - /** - * Transforms a constant name in a format suitable for the - * reasoner. The default implementation assumes the VLog backend. - * @param constantName the name of the constant - * - * @return a transformed version of constantName that is suitable for the Reasoner. - */ - protected String transformConstantName(String constantName) { - if (!constantName.startsWith("\"") && constantName.contains(":")) { - // enclose IRIs with brackets - return "<" + constantName + ">"; - } - // it's either a datatype literal, or a relative IRI, leave it unchanged - return constantName; - } - /** * Adds a fact to this data source. The number of constant names must agree with * the arity of this data source. @@ -68,7 +52,7 @@ public void addTuple(final String... constantNames) { } this.data[this.nextEmptyTuple] = new String[this.arity]; for (int i = 0; i < this.arity; i++) { - this.data[this.nextEmptyTuple][i] = transformConstantName(constantNames[i]); + this.data[this.nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstantName(constantNames[i]); } this.nextEmptyTuple++; } From 74ab5a9ee3fe7e2c01291a9ee3fa1462c558e4df Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 20 May 2020 12:41:07 +0200 Subject: [PATCH 0620/1003] eliminate duplicated code for getting VLog string representation of IRI --- .../reasoner/vlog/TermToVLogConverter.java | 46 ++++++++++--------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index 90496ff76..13a078076 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -1,5 +1,11 @@ package org.semanticweb.rulewerk.reasoner.vlog; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; + /*- * #%L * Rulewerk VLog Reasoner Support @@ -22,11 +28,6 @@ import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.TermType; -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; -import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; -import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; import org.semanticweb.rulewerk.core.model.api.TermVisitor; import org.semanticweb.rulewerk.core.model.api.UniversalVariable; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; @@ -48,7 
+49,7 @@ class TermToVLogConverter implements TermVisitor { * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. */ @Override - public karmaresearch.vlog.Term visit(AbstractConstant term) { + public karmaresearch.vlog.Term visit(final AbstractConstant term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, getVLogNameForConstant(term)); } @@ -57,7 +58,7 @@ public karmaresearch.vlog.Term visit(AbstractConstant term) { * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. */ @Override - public karmaresearch.vlog.Term visit(DatatypeConstant term) { + public karmaresearch.vlog.Term visit(final DatatypeConstant term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); } @@ -67,7 +68,7 @@ public karmaresearch.vlog.Term visit(DatatypeConstant term) { * {@link karmaresearch.vlog.Term.TermType#CONSTANT}. */ @Override - public karmaresearch.vlog.Term visit(LanguageStringConstant term) { + public karmaresearch.vlog.Term visit(final LanguageStringConstant term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); } @@ -77,16 +78,12 @@ public karmaresearch.vlog.Term visit(LanguageStringConstant term) { * @param constant * @return VLog constant string */ - public static String getVLogNameForConstant(Constant constant) { + public static String getVLogNameForConstant(final Constant constant) { + final String constantName = constant.getName(); if (constant.getType() == TermType.ABSTRACT_CONSTANT) { - String rulewerkConstantName = constant.getName(); - if (rulewerkConstantName.contains(":")) { // enclose IRIs with < > - return "<" + rulewerkConstantName + ">"; - } else { // keep relative IRIs unchanged - return rulewerkConstantName; - } + return getVLogNameForIRI(constantName); } else { // datatype literal - return constant.getName(); + return constantName; } } @@ -96,7 +93,7 @@ public static String getVLogNameForConstant(Constant constant) { * @param named null * @return VLog constant string */ - public static String getVLogNameForNamedNull(NamedNull namedNull) { + public static String getVLogNameForNamedNull(final NamedNull namedNull) { if (namedNull instanceof RenamedNamedNull) { return namedNull.getName(); } else { @@ -111,10 +108,15 @@ public static String getVLogNameForNamedNull(NamedNull namedNull) { * @param rulewerkConstantName * @return VLog constant string */ - public static String getVLogNameForConstantName(String rulewerkConstantName) { + public static String getVLogNameForConstantName(final String rulewerkConstantName) { if (rulewerkConstantName.startsWith("\"")) { // keep datatype literal strings unchanged return rulewerkConstantName; - } else if (rulewerkConstantName.contains(":")) { // enclose IRIs with < > + } else + return getVLogNameForIRI(rulewerkConstantName); + } + + private static String getVLogNameForIRI(final String rulewerkConstantName) { + if (rulewerkConstantName.contains(":")) { // enclose absolute IRIs with < > return "<" + rulewerkConstantName + ">"; } else { // keep relative IRIs unchanged return rulewerkConstantName; @@ -126,7 +128,7 @@ public static String getVLogNameForConstantName(String rulewerkConstantName) { * same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. 
*/ @Override - public karmaresearch.vlog.Term visit(UniversalVariable term) { + public karmaresearch.vlog.Term visit(final UniversalVariable term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, term.getName()); } @@ -135,7 +137,7 @@ public karmaresearch.vlog.Term visit(UniversalVariable term) { * the same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. */ @Override - public karmaresearch.vlog.Term visit(ExistentialVariable term) { + public karmaresearch.vlog.Term visit(final ExistentialVariable term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "!" + term.getName()); } @@ -144,7 +146,7 @@ public karmaresearch.vlog.Term visit(ExistentialVariable term) { * name and type {@link karmaresearch.vlog.Term.TermType#BLANK}. */ @Override - public karmaresearch.vlog.Term visit(NamedNull term) { + public karmaresearch.vlog.Term visit(final NamedNull term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, term.getName()); } From 6a128c9fb194d17424bbaba19403f5355f5eca62 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 21 May 2020 22:03:12 +0200 Subject: [PATCH 0621/1003] update reference to released vlog-java.1.3.3 --- rulewerk-vlog/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index d8438ed7d..716c5e10d 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -17,7 +17,7 @@ Bindings for the VLog reasoner backend. - 1.3.3-snapshot + 1.3.3 vlog-java From b0bb1d82b85090060f62897757563ef3e6e1df39 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 21 May 2020 22:56:53 +0200 Subject: [PATCH 0622/1003] update release notes --- RELEASE-NOTES.md | 244 ++++++++++++++++++++++++----------------------- 1 file changed, 124 insertions(+), 120 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 9f52a1d57..8a2d82386 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,120 +1,124 @@ -Rulewerk Release Notes -==================== - -Rulewerk v0.6.0 -------------- - -Breaking changes: -* VLog4j is now called Rulewerk. -* In the example package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no - longer exist. It can be replaced by - `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` -* The `FileDataSource` constructor and those of derived classes now - take the path to a file instead of `File` object. -* The VLog backend has been moved to a new `rulewerk-vlog` module, - changing several import paths. `Reasoner.getInstance()` is - gone. Furthermore, InMemoryDataSource has become an abstract class, - use VLogInMemoryDataSource where applicable. - -New features: -* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` -* All inferred facts can be serialized to a file using `Reasoner.writeInferences()` -* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()` -* Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` -* Rules files may import other rules files using `@import` and - `@import-relative`, where the latter resolves relative IRIs using - the current base IRI, unless the imported file explicitly specifies - a different one. -* Named nulls of the form `_:name` are now allowed during parsing (but - may not occur in rule bodies). They are renamed to assure that they - are distinct on a per-file level. 
-* The parser allows custom directives to be implemented, and a certain - set of delimiters allows for custom literal expressions. - -Other improvements: -* Prefix declarations are now kept as part of the Knowledge Base and - are used to abbreviate names when exporting inferences. - - -VLog4j v0.5.0 -------------- - -Breaking changes: -* The data model for rules has been refined and changed: - * Instead of Constant, specific types of constants are used to capture abtract and data values - * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification - * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes - * Methods to access terms now use Java Streams and are unified across syntactic objects -* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()` - -New features: -* New module vlog4j-client provides a stand-alone command line client jar for VLog4j -* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki -* The parser behaviour for data source declarations and certain datatype literals can be customised. - -Other improvements: -* Data model is better aligned with syntax supported by parser -* Java object Statements (rules, facts, datasource declarations) String representation is parseable -* OWL API dependency has been upgraded from 4.5.1 to latest (5.1.11) -* SL4J dependency has been upgraded from 1.7.10 to latest (1.7.28) -* Cobertura test coverage tool has been replaced by JaCoCo - -Bugfixes: -* Acyclicity checks work again without calling reason() first (issue #128) -* in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) -* in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) -* When parsing syntactic fragment such as Facts or Literals, the parser now enforces that all input is consumed. 
- -VLog4j v0.4.0 -------------- - -Breaking changes: -* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) -* The EdbIdbSeparation is obsolete and does no longer exist -* IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier -* A new interface Fact has replaced the overly general PositiveLiteral in many places - -New features: -* New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java -* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction) -* New InMemoryDataSource for efficient in-memory fact loading -* New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner -* Modifications to the knowledge base are taken into account by the reasoner -* New and updated example programs to illustrate use of syntax - -Other improvements: -* Query results now indicate their guaranteed correctness (example: answers can be incomplete when setting a timeout) -* Faster and more memory-efficient loading of facts -* Better error reporting; improved use of exceptions -* Better logging, especially on the INFO level -* Better code structure and testing - -Bugfixes: -* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now - - -VLog4j v0.3.0 -------------- - -New features: -* Support for Graal data structures (conversion from Graal model to VLog model objects) -* Stratified negation: rule bodies are conjunctions of positive or negated literals -* SPARQL-based data sources: load remote data from SPARQL endpoints -* Acyclicity and cyclicity checks: JA, RJA, MFA, RMFA, RFC, as well as a generic method that checks whether given set or rules and fact predicates are acyclic, cyclic, or undetermined - -VLog4j v0.2.0 -------------- - -New features: -* supporting File data sources of N-Triples format (.nt file extension) -* supporting g-zipped data source files (.csv.gz, .nt.gz) - -VLog4j v0.1.0 -------------- - -Initial release. - -New features: -* Essential data models for rules and facts, and essential reasoner functionality -* support for reading from RDF files -* support for converting rules from OWL ontology, loaded with the OWL API +Rulewerk Release Notes +==================== + +Rulewerk v0.6.0 +------------- + +Breaking changes: +* VLog4j is now called Rulewerk. Consequently, the groupId, artifact Ids, and package names + of the project have changed. +* In the examples package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no + longer exist. It can be replaced by + `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` +* The `FileDataSource` constructor and those of child classes (`CsvFileDataSource`, `RdfFileDataSource`) + now take the String path to a file instead of `File` object. +* The VLog backend has been moved to a new `rulewerk-vlog` module, + changing several import paths. `Reasoner.getInstance()` is + gone. Furthermore, `InMemoryDataSource` has become an abstract class, + use `VLogInMemoryDataSource` where applicable. + +New features: +* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` +* All inferred facts can be serialized to a file using `Reasoner.writeInferences()` +* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()` +* `Reasoner.getCorrectness()` returns the correctness result of the last reasoning task. 
+* Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` +* Rules files may import other rules files using `@import` and + `@import-relative`, where the latter resolves relative IRIs using + the current base IRI, unless the imported file explicitly specifies + a different one. +* Named nulls of the form `_:name` are now allowed during parsing (but + may not occur in rule bodies). They are renamed to assure that they + are distinct on a per-file level. +* The parser allows custom directives to be implemented, and a certain + set of delimiters allows for custom literal expressions. + +Other improvements: +* Prefix declarations are now kept as part of the Knowledge Base and + are used to abbreviate names when exporting inferences. + +Bugfixes: +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now + +VLog4j v0.5.0 +------------- + +Breaking changes: +* The data model for rules has been refined and changed: + * Instead of Constant, specific types of constants are used to capture abtract and data values + * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification + * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes + * Methods to access terms now use Java Streams and are unified across syntactic objects +* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()` + +New features: +* New module vlog4j-client provides a stand-alone command line client jar for VLog4j +* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki +* The parser behaviour for data source declarations and certain datatype literals can be customised. + +Other improvements: +* Data model is better aligned with syntax supported by parser +* Java object Statements (rules, facts, datasource declarations) String representation is parseable +* OWL API dependency has been upgraded from 4.5.1 to latest (5.1.11) +* SL4J dependency has been upgraded from 1.7.10 to latest (1.7.28) +* Cobertura test coverage tool has been replaced by JaCoCo + +Bugfixes: +* Acyclicity checks work again without calling reason() first (issue #128) +* in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) +* in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) +* When parsing syntactic fragment such as Facts or Literals, the parser now enforces that all input is consumed. 
+ +VLog4j v0.4.0 +------------- + +Breaking changes: +* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) +* The EdbIdbSeparation is obsolete and does no longer exist +* IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier +* A new interface Fact has replaced the overly general PositiveLiteral in many places + +New features: +* New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java +* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction) +* New InMemoryDataSource for efficient in-memory fact loading +* New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner +* Modifications to the knowledge base are taken into account by the reasoner +* New and updated example programs to illustrate use of syntax + +Other improvements: +* Query results now indicate their guaranteed correctness (example: answers can be incomplete when setting a timeout) +* Faster and more memory-efficient loading of facts +* Better error reporting; improved use of exceptions +* Better logging, especially on the INFO level +* Better code structure and testing + +Bugfixes: +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now + + +VLog4j v0.3.0 +------------- + +New features: +* Support for Graal data structures (conversion from Graal model to VLog model objects) +* Stratified negation: rule bodies are conjunctions of positive or negated literals +* SPARQL-based data sources: load remote data from SPARQL endpoints +* Acyclicity and cyclicity checks: JA, RJA, MFA, RMFA, RFC, as well as a generic method that checks whether given set or rules and fact predicates are acyclic, cyclic, or undetermined + +VLog4j v0.2.0 +------------- + +New features: +* supporting File data sources of N-Triples format (.nt file extension) +* supporting g-zipped data source files (.csv.gz, .nt.gz) + +VLog4j v0.1.0 +------------- + +Initial release. 
+ +New features: +* Essential data models for rules and facts, and essential reasoner functionality +* support for reading from RDF files +* support for converting rules from OWL ontology, loaded with the OWL API From 3e6941f5533a375081325c21950c964529da4274 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 21 May 2020 23:16:09 +0200 Subject: [PATCH 0623/1003] update to release version 0.6.0 --- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index c91db4c28..06389c423 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 coverage diff --git a/pom.xml b/pom.xml index 391c9de20..28934543e 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index a3760ec8b..1c820bb25 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-client diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 36a019c2c..ee5836588 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 84344dd75..1a255eea2 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index f58afbd16..93c167727 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-graal diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index 6e9fab4c3..28e8e7fd8 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index 87da11c8d..c76439ecd 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-parser diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index efed746b6..4716807f8 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 716c5e10d..c364a64d1 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-vlog From c234d6c99bbcc834dc141f97bbd4df55b3430960 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 21 May 2020 23:25:44 +0200 Subject: [PATCH 0624/1003] update README to point to release 0.6.0 --- README.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 11ee4e1fc..47b38c0f1 100644 --- a/README.md +++ b/README.md @@ -9,16 +9,19 
@@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of Rulewerk is version 0.5.0 and was released as *vlog4j* (all future releases will be published as *rulewerk*). The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of Rulewerk is version 0.6.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` - org.semanticweb.vlog4j - vlog4j-core - 0.5.0 + org.semanticweb.rulewerk + rulewerk-core + 0.6.0 ``` +Previous to version `0.6.0`, *rulewerk* project name was *vlog4j*. Older versions released under name *vlog4j* have `org.semanticweb.vlog4j` and `vlog4j-core`, the latest version being version `0.5.0`. + + You need to use Java 1.8 or above. Available modules include: * **rulewerk-core**: essential data models for rules and facts, and essential reasoner functionality From b90e209ea23324974a491d26ef1a2f7d2ecdb584 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 22 May 2020 01:23:32 +0200 Subject: [PATCH 0625/1003] update to snapshot version --- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 06389c423..b65b563fa 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT coverage diff --git a/pom.xml b/pom.xml index 28934543e..47efeb4a6 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index 1c820bb25..64f043d72 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-client diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index ee5836588..34701907a 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 1a255eea2..d4abb343b 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index 93c167727..e5621cfbb 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-graal diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index 28e8e7fd8..d351dd2c3 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index c76439ecd..75487af58 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-parser diff --git 
a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index 4716807f8..16e796b43 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index c364a64d1..5e767200f 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-vlog From 506af489b03821d9059609adf5ffd782e70a78b2 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 22 May 2020 01:45:10 +0200 Subject: [PATCH 0626/1003] fix javadoc --- .../rulewerk/core/reasoner/KnowledgeBase.java | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 7765364c6..ad03ba16e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -195,7 +195,7 @@ public Void visit(final DataSourceDeclaration statement) { /** * Registers a listener for changes on the knowledge base * - * @param listener + * @param listener a KnowledgeBaseListener */ public void addListener(final KnowledgeBaseListener listener) { this.listeners.add(listener); @@ -204,7 +204,7 @@ public void addListener(final KnowledgeBaseListener listener) { /** * Unregisters given listener from changes on the knowledge base * - * @param listener + * @param listener KnowledgeBaseListener */ public void deleteListener(final KnowledgeBaseListener listener) { this.listeners.remove(listener); @@ -478,7 +478,7 @@ public void importRulesFile(File file, AdditionalInputParser parseFunction) throws RulewerkException, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); - boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); + boolean isNewFile = this.importedFilePaths.add(file.getCanonicalPath()); Validate.isTrue(isNewFile, "file \"" + file.getName() + "\" was already imported."); try (InputStream stream = new FileInputStream(file)) { @@ -564,22 +564,22 @@ public String unresolveAbsoluteIri(String iri) { * * @param stream the {@link OutputStream} to serialise to. 
* - * @throws IOException + * @throws IOException if an I/O error occurs while writing to given output stream */ public void writeKnowledgeBase(OutputStream stream) throws IOException { stream.write(Serializer.getBaseAndPrefixDeclarations(this).getBytes()); - for (DataSourceDeclaration dataSource : getDataSourceDeclarations()) { + for (DataSourceDeclaration dataSource : this.getDataSourceDeclarations()) { stream.write(Serializer.getString(dataSource).getBytes()); stream.write('\n'); } - for (Rule rule : getRules()) { + for (Rule rule : this.getRules()) { stream.write(Serializer.getString(rule).getBytes()); stream.write('\n'); } - for (Fact fact : getFacts()) { + for (Fact fact : this.getFacts()) { stream.write(Serializer.getFactString(fact).getBytes()); stream.write('\n'); } @@ -594,7 +594,7 @@ public void writeKnowledgeBase(OutputStream stream) throws IOException { */ public void writeKnowledgeBase(String filePath) throws IOException { try (OutputStream stream = new FileOutputStream(filePath)) { - writeKnowledgeBase(stream); + this.writeKnowledgeBase(stream); } } } From f79c14cfd4f9e5fdef7e9f14705251e865047a95 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 4 Jun 2020 17:02:23 +0200 Subject: [PATCH 0627/1003] Parser: Allow absolute IRIs in Rule Bodies Fixes #178. --- .../rulewerk/parser/javacc/JavaCCParser.jj | 2 +- .../rulewerk/parser/RuleParserTest.java | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index 0d633b1ad..7a80fd52c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -474,7 +474,7 @@ MORE : { "@": DIRECTIVE } -< DEFAULT, TERM, DIRECTIVE_ARGUMENTS > MORE : { +< DEFAULT, TERM, DIRECTIVE_ARGUMENTS, BODY > MORE : { "<" { pushState(); } : ABSOLUTE_IRI } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index f3e030e02..271a49598 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -448,6 +448,21 @@ public void predicateAbsoluteIRITest() throws ParsingException { assertEquals(f, f2); } + @Test + public void parse_absoluteIriInRuleHead_succeeds() throws ParsingException { + RuleParser.parseRule("(?x) :- B(?x), C(?x) ."); + } + + @Test + public void parse_absoluteIriInRuleBody_succeeds() throws ParsingException { + RuleParser.parseRule("A(?x) :- B(?x), (?x) ."); + } + + @Test + public void parse_absoluteIrisInRule_succeeds() throws ParsingException { + RuleParser.parseRule("(?x) :- B(?x), (?x) ."); + } + @Test public void testCustomDatatype() throws ParsingException { final String typename = "http://example.org/#test"; From 7fa2ed7d6c6878881b1b25fac57ae29bf6698611 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 10 Jun 2020 17:41:10 +0200 Subject: [PATCH 0628/1003] Improved example code to count only proper triangles --- RELEASE-NOTES.md | 11 +++++++++-- .../examples/InMemoryGraphAnalysisExample.java | 10 ++++++---- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 8a2d82386..eb880ff50 100644 --- a/RELEASE-NOTES.md +++ 
b/RELEASE-NOTES.md @@ -1,8 +1,15 @@ Rulewerk Release Notes -==================== +====================== + +Rulewerk v0.7.0 +--------------- + +Other improvements: +* InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where + two or more edges are the same. Rulewerk v0.6.0 -------------- +--------------- Breaking changes: * VLog4j is now called Rulewerk. Consequently, the groupId, artifact Ids, and package names diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java index 838ea639e..9bec5600e 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -78,7 +78,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio + "connected(v1) ." // + "connected(?X) :- connected(?Y), biedge(?Y,?X) ." // + "unreachable(?X) :- vertex(?X), ~connected(?X) . " // - + "triangle(?X, ?Y, ?Z) :- biedge(?X,?Y), biedge(?Y, ?Z), biedge(?Z,?X) ."; + + "triangle(?X, ?Y, ?Z) :- biedge(?X,?Y), biedge(?Y, ?Z), biedge(?Z,?X) ." // + + "loop(?X,?X) :- edge(?X,?X) . " // + + "properTriangle(?X, ?Y, ?Z) :- triangle(?X,?Y,?Z), ~loop(?X,?Y), ~loop(?Y, ?Z), ~loop(?Z, ?X) . "; final KnowledgeBase kb = RuleParser.parse(rules); kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("vertex", 1), vertices)); @@ -88,14 +90,14 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double unreachable = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("unreachable(?X)")) + final long unreachable = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("unreachable(?X)")) .getCount(); - final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) + final long triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("properTriangle(?X,?Y,?Z)")) .getCount(); System.out .println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + unreachable); - System.out.println("Number of bi-directional triangles: " + (triangles / 6)); + System.out.println("Number of proper bi-directional triangles: " + (triangles / 6) + " (found in " + triangles + " matches due to symmetry.)"); } } From ae39a8ac83fc0643d67ce15e1ee19e122cc63a54 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 16:17:50 +0200 Subject: [PATCH 0629/1003] Replace VLogQueryResultIterator by faster implementation * New class avoids use of VLog's TermQueryResultIterator * Hence no intermediate string representations of results * Two forms of caching reduce dictionary lookups and number of term objects in RuleWerk --- .../vlog/VLogFastQueryResultIterator.java | 183 ++++++++++++++++++ .../vlog/VLogQueryResultIterator.java | 70 ------- .../rulewerk/reasoner/vlog/VLogReasoner.java | 102 ++++++---- .../reasoner/vlog/VLogToModelConverter.java | 2 +- 4 files changed, 253 insertions(+), 104 deletions(-) create mode 100644 rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java delete mode 100644 rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java diff --git 
a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java new file mode 100644 index 000000000..00a53bba9 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -0,0 +1,183 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; + +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.VLog; + +/** + * Iterates trough all answers to a query. An answer to a query is a + * {@link QueryResult}. Each query answer is distinct. + * + * @author Markus Kroetzsch + * + */ +public class VLogFastQueryResultIterator implements QueryResultIterator { + + /** + * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used + * here for mapping VLog ids to terms. + * + * @author Markus Kroetzsch + * + * @param + * @param + */ + public static class SimpleLruMap extends LinkedHashMap { + private static final long serialVersionUID = 7151535464938775359L; + private int maxCapacity; + + public SimpleLruMap(int initialCapacity, int maxCapacity) { + super(initialCapacity, 0.75f, true); + this.maxCapacity = maxCapacity; + } + + @Override + protected boolean removeEldestEntry(Map.Entry eldest) { + return size() >= this.maxCapacity; + } + } + + /** + * The internal result iterator of VLog, returning numeric ids only. + */ + private final karmaresearch.vlog.QueryResultIterator vLogQueryResultIterator; + /** + * The VLog instance. Used for resolving numeric ids to term names. + */ + private final VLog vlog; + /** + * VLog ids of the previous tuple, with the last id omitted (since it is not + * useful in caching). + */ + private long[] prevIds = null; + /** + * RuleWerk terms corresponding to the previously fetched tuple, with the last + * term omitted. + */ + private Term[] prevTerms = null; + /** + * True if this is the first result that is returned. + */ + boolean firstResult = true; + /** + * Size of the tuples returned in this result. + */ + int resultSize = -1; + /** + * LRU cache mapping ids to terms. + */ + final SimpleLruMap termCache; + + private final Correctness correctness; + + /** + * Create a new {@link VLogFastQueryResultIterator}. 
+ * + * @param queryResultIterator + * @param materialisationState + * @param vLog + */ + public VLogFastQueryResultIterator(final karmaresearch.vlog.QueryResultIterator queryResultIterator, + final Correctness materialisationState, final VLog vLog) { + this.vLogQueryResultIterator = queryResultIterator; + this.correctness = materialisationState; + this.vlog = vLog; + this.termCache = new SimpleLruMap(256, 16384); + } + + @Override + public boolean hasNext() { + return this.vLogQueryResultIterator.hasNext(); + } + + @Override + public QueryResult next() { + final Term[] terms; + long[] idTuple = vLogQueryResultIterator.next(); + terms = new Term[idTuple.length]; + + if (firstResult) { + resultSize = terms.length; + prevIds = new long[resultSize - 1]; + prevTerms = new Term[resultSize - 1]; + } + + int i = 0; + for (long id : idTuple) { + if (!firstResult && i < resultSize - 1 && prevIds[i] == id) { + terms[i] = prevTerms[i]; + } else { + Term term = this.termCache.get(id); + if (term == null) { + try { + String s = vlog.getConstant(id); + // This internal handling is copied from VLog's code in {@link + // karmaresearch.vlog.TermQueryResultIterator}. + // TODO: the string operation to make null names should possibly be provided by + // VLog rather than being hardcoded here? + if (s == null) { + term = new NamedNullImpl( + "" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); + } else { + term = VLogToModelConverter.toConstant(s); + } + } catch (NotStartedException e) { + // Should not happen, we just did a query ... + throw new RuntimeException(e); + } + this.termCache.put(id, term); + } + terms[i] = term; + if (i < resultSize - 1) { + prevTerms[i] = term; + prevIds[i] = id; + } + } + i++; + } + + firstResult = false; + return new QueryResultImpl(List.of(terms)); + } + + @Override + public void close() { + this.vLogQueryResultIterator.close(); + } + + @Override + public Correctness getCorrectness() { + return this.correctness; + } + +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java deleted file mode 100644 index 35dc7f75c..000000000 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java +++ /dev/null @@ -1,70 +0,0 @@ -package org.semanticweb.rulewerk.reasoner.vlog; - -/* - * #%L - * Rulewerk VLog Reasoner Support - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.rulewerk.core.model.api.QueryResult; -import org.semanticweb.rulewerk.core.reasoner.Correctness; -import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; - -import karmaresearch.vlog.Term; -import karmaresearch.vlog.TermQueryResultIterator; - -/** - * Iterates trough all answers to a query. An answer to a query is a - * {@link QueryResult}. Each query answer is distinct. 
- * - * @author Irina Dragoste - * - */ -public class VLogQueryResultIterator implements QueryResultIterator { - - private final TermQueryResultIterator vLogTermQueryResultIterator; - - private final Correctness correctness; - - public VLogQueryResultIterator(final TermQueryResultIterator termQueryResultIterator, - final Correctness materialisationState) { - this.vLogTermQueryResultIterator = termQueryResultIterator; - this.correctness = materialisationState; - } - - @Override - public boolean hasNext() { - return this.vLogTermQueryResultIterator.hasNext(); - } - - @Override - public QueryResult next() { - final Term[] vLogQueryResult = this.vLogTermQueryResultIterator.next(); - return VLogToModelConverter.toQueryResult(vLogQueryResult); - } - - @Override - public void close() { - this.vLogTermQueryResultIterator.close(); - } - - @Override - public Correctness getCorrectness() { - return this.correctness; - } - -} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index ea35c37f8..35c735e73 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -79,7 +79,7 @@ public class VLogReasoner implements Reasoner { private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); final KnowledgeBase knowledgeBase; - final VLog vLog = new VLog(); + public final VLog vLog = new VLog(); private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; @@ -158,18 +158,18 @@ void load() throws IOException { validateNotClosed(); switch (this.reasonerState) { - case KB_NOT_LOADED: - loadKnowledgeBase(); - break; - case KB_LOADED: - case MATERIALISED: - // do nothing, all KB is already loaded - break; - case KB_CHANGED: - resetReasoner(); - loadKnowledgeBase(); - default: - break; + case KB_NOT_LOADED: + loadKnowledgeBase(); + break; + case KB_LOADED: + case MATERIALISED: + // do nothing, all KB is already loaded + break; + case KB_CHANGED: + resetReasoner(); + loadKnowledgeBase(); + default: + break; } } @@ -328,23 +328,23 @@ public boolean reason() throws IOException { validateNotClosed(); switch (this.reasonerState) { - case KB_NOT_LOADED: - load(); - runChase(); - break; - case KB_LOADED: - runChase(); - break; - case KB_CHANGED: - resetReasoner(); - load(); - runChase(); - break; - case MATERIALISED: - runChase(); - break; - default: - break; + case KB_NOT_LOADED: + load(); + runChase(); + break; + case KB_LOADED: + runChase(); + break; + case KB_CHANGED: + resetReasoner(); + load(); + runChase(); + break; + case MATERIALISED: + runChase(); + break; + default: + break; } return this.reasoningCompleted; @@ -388,9 +388,12 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - TermQueryResultIterator stringQueryResultIterator; + karmaresearch.vlog.QueryResultIterator queryResultIterator; + try { - stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); + final int predicateId = this.vLog.getPredicateId(vLogAtom.getPredicate()); + final long[] terms = extractTerms(vLogAtom.getTerms()); + queryResultIterator = this.vLog.query(predicateId, terms, true, filterBlanks); } catch (final 
NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { @@ -400,7 +403,40 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } logWarningOnCorrectness(); - return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); + return new VLogFastQueryResultIterator(queryResultIterator, this.correctness, this.vLog); + } + + /** + * Utility method copied from {@link karmaresearch.vlog.VLog}. + * + * @FIXME This should be provided by VLog and made visible to us rather than + * being copied here. + * @param terms + * @return + * @throws NotStartedException + */ + private long[] extractTerms(karmaresearch.vlog.Term[] terms) throws NotStartedException { + ArrayList variables = new ArrayList<>(); + long[] longTerms = new long[terms.length]; + for (int i = 0; i < terms.length; i++) { + if (terms[i].getTermType() == karmaresearch.vlog.Term.TermType.VARIABLE) { + boolean found = false; + for (int j = 0; j < variables.size(); j++) { + if (variables.get(j).equals(terms[i].getName())) { + found = true; + longTerms[i] = -j - 1; + break; + } + } + if (!found) { + variables.add(terms[i].getName()); + longTerms[i] = -variables.size(); + } + } else { + longTerms[i] = this.vLog.getOrAddConstantId(terms[i].getName()); + } + } + return longTerms; } @Override diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index c68d68234..f313fdefd 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -101,7 +101,7 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { * @param vLogConstantName the string name used by VLog * @return {@link Constant} object */ - private static Constant toConstant(String vLogConstantName) { + static Constant toConstant(String vLogConstantName) { final Constant constant; if (vLogConstantName.charAt(0) == Serializer.LESS_THAN && vLogConstantName.charAt(vLogConstantName.length() - 1) == Serializer.MORE_THAN) { From 5ed54877f04b12ce225b783a0d69cb2b7358f400 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 16:19:28 +0200 Subject: [PATCH 0630/1003] Mention improvement --- RELEASE-NOTES.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index eb880ff50..79f9edb8a 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -4,6 +4,9 @@ Rulewerk Release Notes Rulewerk v0.7.0 --------------- +New features: +* Significant speedup in iterating over query results + Other improvements: * InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where two or more edges are the same. 
From 5412efcba88e50b52757b4ee203835074c973b36 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 18:11:03 +0200 Subject: [PATCH 0631/1003] Arrays.asList seems faster than List.of --- .../reasoner/vlog/VLogFastQueryResultIterator.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 00a53bba9..875ad3b43 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -1,7 +1,7 @@ package org.semanticweb.rulewerk.reasoner.vlog; +import java.util.Arrays; import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; /* @@ -42,7 +42,7 @@ * */ public class VLogFastQueryResultIterator implements QueryResultIterator { - + /** * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used * here for mapping VLog ids to terms. @@ -74,7 +74,7 @@ protected boolean removeEldestEntry(Map.Entry eldest) { /** * The VLog instance. Used for resolving numeric ids to term names. */ - private final VLog vlog; + private final VLog vLog; /** * VLog ids of the previous tuple, with the last id omitted (since it is not * useful in caching). @@ -111,7 +111,7 @@ public VLogFastQueryResultIterator(final karmaresearch.vlog.QueryResultIterator final Correctness materialisationState, final VLog vLog) { this.vLogQueryResultIterator = queryResultIterator; this.correctness = materialisationState; - this.vlog = vLog; + this.vLog = vLog; this.termCache = new SimpleLruMap(256, 16384); } @@ -140,7 +140,7 @@ public QueryResult next() { Term term = this.termCache.get(id); if (term == null) { try { - String s = vlog.getConstant(id); + String s = vLog.getConstant(id); // This internal handling is copied from VLog's code in {@link // karmaresearch.vlog.TermQueryResultIterator}. // TODO: the string operation to make null names should possibly be provided by @@ -167,7 +167,7 @@ public QueryResult next() { } firstResult = false; - return new QueryResultImpl(List.of(terms)); + return new QueryResultImpl(Arrays.asList(terms)); } @Override From 39df1cc3a25d0b95f758a34d61b3133687e450a6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 20:52:00 +0200 Subject: [PATCH 0632/1003] Disable caching for unary queries --- .../vlog/VLogFastQueryResultIterator.java | 45 ++++++++++++------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 875ad3b43..c42a7c9d0 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -42,7 +42,7 @@ * */ public class VLogFastQueryResultIterator implements QueryResultIterator { - + /** * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used * here for mapping VLog ids to terms. 
@@ -136,25 +136,12 @@ public QueryResult next() { for (long id : idTuple) { if (!firstResult && i < resultSize - 1 && prevIds[i] == id) { terms[i] = prevTerms[i]; + } else if (resultSize == 1) { // caching pointless for unary queries + terms[i] = computeTerm(id); } else { Term term = this.termCache.get(id); if (term == null) { - try { - String s = vLog.getConstant(id); - // This internal handling is copied from VLog's code in {@link - // karmaresearch.vlog.TermQueryResultIterator}. - // TODO: the string operation to make null names should possibly be provided by - // VLog rather than being hardcoded here? - if (s == null) { - term = new NamedNullImpl( - "" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); - } else { - term = VLogToModelConverter.toConstant(s); - } - } catch (NotStartedException e) { - // Should not happen, we just did a query ... - throw new RuntimeException(e); - } + term = computeTerm(id); this.termCache.put(id, term); } terms[i] = term; @@ -170,6 +157,30 @@ public QueryResult next() { return new QueryResultImpl(Arrays.asList(terms)); } + /** + * Compute the {@link Term} for a given VLog id. + * + * @param id + * @return + */ + Term computeTerm(long id) { + try { + String s = vLog.getConstant(id); + // This internal handling is copied from VLog's code in {@link + // karmaresearch.vlog.TermQueryResultIterator}. + // TODO: the string operation to make null names should possibly be provided by + // VLog rather than being hardcoded here? + if (s == null) { + return new NamedNullImpl("" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); + } else { + return VLogToModelConverter.toConstant(s); + } + } catch (NotStartedException e) { + // Should not happen, we just did a query ... + throw new RuntimeException(e); + } + } + @Override public void close() { this.vLogQueryResultIterator.close(); From 222e994e4cc067b3e9954093f1ed16ae2587a93a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 21:45:41 +0200 Subject: [PATCH 0633/1003] Optimise caching strategy for ordered data --- .../vlog/VLogFastQueryResultIterator.java | 76 ++++++++++++++----- 1 file changed, 58 insertions(+), 18 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index c42a7c9d0..2a72d738f 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -1,8 +1,7 @@ package org.semanticweb.rulewerk.reasoner.vlog; import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.Map; +import java.util.HashMap; /* * #%L @@ -43,27 +42,66 @@ */ public class VLogFastQueryResultIterator implements QueryResultIterator { +// /** +// * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used +// * here for mapping VLog ids to terms. 
+// * +// * @author Markus Kroetzsch +// * +// * @param +// * @param +// */ +// static class SimpleLruMap extends LinkedHashMap { +// private static final long serialVersionUID = 7151535464938775359L; +// private int maxCapacity; +// +// public SimpleLruMap(int initialCapacity, int maxCapacity) { +// super(initialCapacity, 0.75f, true); +// this.maxCapacity = maxCapacity; +// } +// +// @Override +// protected boolean removeEldestEntry(Map.Entry eldest) { +// return size() >= this.maxCapacity; +// } +// } + /** - * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used - * here for mapping VLog ids to terms. + * Simple cache for finding terms for VLog ids that is optimised for the case + * where ids are inserted in a mostly ordered fashion. An LRU strategy is highly + * ineffective for this as soon as the cache capacity is smaller than the number + * of repeatedly used terms, since the cache entries there are always pushed out + * before being needed again. This implementation will at least cache a maximal + * initial fragment in such cases. It is also faster to write and requires less + * memory. * * @author Markus Kroetzsch * - * @param - * @param */ - public static class SimpleLruMap extends LinkedHashMap { - private static final long serialVersionUID = 7151535464938775359L; - private int maxCapacity; + static class OrderedTermCache { + final private HashMap terms = new HashMap<>(); + final int maxCapacity; + private long maxId = -1; - public SimpleLruMap(int initialCapacity, int maxCapacity) { - super(initialCapacity, 0.75f, true); - this.maxCapacity = maxCapacity; + public OrderedTermCache(int capacity) { + this.maxCapacity = capacity; } - @Override - protected boolean removeEldestEntry(Map.Entry eldest) { - return size() >= this.maxCapacity; + public Term get(long id) { + if (id > maxId) { + return null; + } else { + return terms.get(id); + } + } + + public void put(long id, Term term) { + if (terms.size() < maxCapacity) { + terms.put(id, term); + if (id > maxId) { + maxId = id; + } + } } } @@ -94,9 +132,10 @@ protected boolean removeEldestEntry(Map.Entry eldest) { */ int resultSize = -1; /** - * LRU cache mapping ids to terms. + * Cache mapping ids to terms. 
*/ - final SimpleLruMap termCache; + // final SimpleLruMap termCache; + final OrderedTermCache termCache; private final Correctness correctness; @@ -112,7 +151,8 @@ public VLogFastQueryResultIterator(final karmaresearch.vlog.QueryResultIterator this.vLogQueryResultIterator = queryResultIterator; this.correctness = materialisationState; this.vLog = vLog; - this.termCache = new SimpleLruMap(256, 16384); + // this.termCache = new SimpleLruMap(256, 64000); + this.termCache = new OrderedTermCache(130000); } @Override From f548de8dcfcd66834af19512ddd949090bb0bb7b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 22:04:41 +0200 Subject: [PATCH 0634/1003] vLog should not be public --- .../org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 35c735e73..8f17d699f 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -79,7 +79,7 @@ public class VLogReasoner implements Reasoner { private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); final KnowledgeBase knowledgeBase; - public final VLog vLog = new VLog(); + final VLog vLog = new VLog(); private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; From f491cf051542860ae25156e7c4a91e09573cd044 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 18 Jun 2020 09:09:59 +0200 Subject: [PATCH 0635/1003] Change exception type as requested --- .../reasoner/vlog/VLogFastQueryResultIterator.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 2a72d738f..b3978303a 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -3,6 +3,8 @@ import java.util.Arrays; import java.util.HashMap; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; + /* * #%L * Rulewerk VLog Reasoner Support @@ -215,9 +217,8 @@ Term computeTerm(long id) { } else { return VLogToModelConverter.toConstant(s); } - } catch (NotStartedException e) { - // Should not happen, we just did a query ... - throw new RuntimeException(e); + } catch (NotStartedException e) { // Should never happen, we just did a query ... 
+ throw new RulewerkRuntimeException(e); } } From f782f7dce215c91af47dc6f593a6d0c542421e9f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 18 Jun 2020 18:17:59 +0200 Subject: [PATCH 0636/1003] Even faster implementation --- .../vlog/VLogFastQueryResultIterator.java | 48 +++++++++---------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index b3978303a..6f0696987 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -2,6 +2,7 @@ import java.util.Arrays; import java.util.HashMap; +import java.util.List; import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; @@ -116,15 +117,10 @@ public void put(long id, Term term) { */ private final VLog vLog; /** - * VLog ids of the previous tuple, with the last id omitted (since it is not + * VLog ids of the previous tuple, with the last id fixed to -1 (since it is never * useful in caching). */ private long[] prevIds = null; - /** - * RuleWerk terms corresponding to the previously fetched tuple, with the last - * term omitted. - */ - private Term[] prevTerms = null; /** * True if this is the first result that is returned. */ @@ -133,6 +129,10 @@ public void put(long id, Term term) { * Size of the tuples returned in this result. */ int resultSize = -1; + /** + * Previous tuple that was returned. + */ + Term[] prevTuple; /** * Cache mapping ids to terms. */ @@ -164,23 +164,25 @@ public boolean hasNext() { @Override public QueryResult next() { - final Term[] terms; - long[] idTuple = vLogQueryResultIterator.next(); - terms = new Term[idTuple.length]; + final long[] idTuple = vLogQueryResultIterator.next(); if (firstResult) { - resultSize = terms.length; - prevIds = new long[resultSize - 1]; - prevTerms = new Term[resultSize - 1]; + resultSize = idTuple.length; + prevTuple = new Term[resultSize]; + prevIds = new long[resultSize]; + Arrays.fill(prevIds, -1); // (practically) impossible id + firstResult = false; } + if (resultSize == 1) { // Caching is pointless for unary queries + return new QueryResultImpl(List.of(computeTerm(idTuple[0]))); + } + + // (Array.copyOf was slightly faster than System.arraycopy in tests) + final Term[] terms = Arrays.copyOf(prevTuple, resultSize); int i = 0; for (long id : idTuple) { - if (!firstResult && i < resultSize - 1 && prevIds[i] == id) { - terms[i] = prevTerms[i]; - } else if (resultSize == 1) { // caching pointless for unary queries - terms[i] = computeTerm(id); - } else { + if (prevIds[i] != id) { Term term = this.termCache.get(id); if (term == null) { term = computeTerm(id); @@ -188,14 +190,13 @@ public QueryResult next() { } terms[i] = term; if (i < resultSize - 1) { - prevTerms[i] = term; prevIds[i] = id; } } i++; } - firstResult = false; + prevTuple = terms; return new QueryResultImpl(Arrays.asList(terms)); } @@ -208,12 +209,11 @@ public QueryResult next() { Term computeTerm(long id) { try { String s = vLog.getConstant(id); - // This internal handling is copied from VLog's code in {@link - // karmaresearch.vlog.TermQueryResultIterator}. - // TODO: the string operation to make null names should possibly be provided by - // VLog rather than being hardcoded here? 
if (s == null) { - return new NamedNullImpl("" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); + // This string operation extracts the internal rule number (val >> 40), + // the internal variable number ((val >> 32) & 0377), and + // a counter (val & 0xffffffffL) + return new NamedNullImpl("null" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); } else { return VLogToModelConverter.toConstant(s); } From 166bcb9ae27507bfbf72595a45e35b27b4992136 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 18 Jun 2020 21:53:15 +0200 Subject: [PATCH 0637/1003] Restore Java 8 compatibility --- .../rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 6f0696987..518f5d71a 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -1,6 +1,7 @@ package org.semanticweb.rulewerk.reasoner.vlog; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -175,7 +176,7 @@ public QueryResult next() { } if (resultSize == 1) { // Caching is pointless for unary queries - return new QueryResultImpl(List.of(computeTerm(idTuple[0]))); + return new QueryResultImpl(Collections.singletonList(computeTerm(idTuple[0]))); } // (Array.copyOf was slightly faster than System.arraycopy in tests) From 44c97816d28c8f0e680447b364e2f6ec3ff80f49 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 18 Jun 2020 22:17:19 +0200 Subject: [PATCH 0638/1003] Remove unnecessary import --- .../rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java | 1 - 1 file changed, 1 deletion(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 518f5d71a..17acf658e 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -3,7 +3,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.List; import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; From a60919acf7c4e3fb16a4e199f411f8134e17824f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 14:45:29 +0200 Subject: [PATCH 0639/1003] Support extraction of Java types from constants --- .../model/api/PrefixDeclarationRegistry.java | 4 + .../rulewerk/core/model/api/Terms.java | 89 +++++++++++++++++++ 2 files changed, 93 insertions(+) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index cde555c76..49cc7abe3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -38,6 +38,10 @@ public interface PrefixDeclarationRegistry extends Iterable 
getDatatypeConstants(Stream terms) return terms.filter(term -> term.getType() == TermType.DATATYPE_CONSTANT).map(DatatypeConstant.class::cast); } + /** + * Returns the lexical value of a term that is an xsd:string constant, and + * throws an exception for all other cases. + * + * @param term the term from which the string is to be extracted + * @return extracted string + * @throws IllegalArgumentException if the given term is not a constant of type + * xsd:string + */ + public static String extractString(Term term) { + if (term.getType() == TermType.DATATYPE_CONSTANT) { + DatatypeConstant datatypeConstant = (DatatypeConstant) term; + if (PrefixDeclarationRegistry.XSD_STRING.equals(datatypeConstant.getDatatype())) + return datatypeConstant.getLexicalValue(); + } + throw new IllegalArgumentException( + "Term " + term.toString() + " is not a datatype constant of type xsd:string."); + } + + /** + * Returns the IRI representation of an abstract term, and throws an exception + * for all other cases. + * + * @param term the term from which the IRI is to be extracted + * @return extracted IRI + * @throws IllegalArgumentException if the given term is not an abstract + * constant or cannot be parsed as an IRI + */ + public static URI extractIri(Term term) { + if (term.getType() == TermType.ABSTRACT_CONSTANT) { + try { + return new URI(term.getName()); + } catch (URISyntaxException e) { + throw new IllegalArgumentException(e); + } + } + throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); + } + + /** + * Returns the URL representation of an abstract term, and throws an exception + * for all other cases. + * + * @param term the term from which the URL is to be extracted + * @return extracted URL + * @throws IllegalArgumentException if the given term is not an abstract + * constant or cannot be parsed as a URL + */ + public static URL extractUrl(Term term) { + if (term.getType() == TermType.ABSTRACT_CONSTANT) { + try { + return new URL(term.getName()); + } catch (MalformedURLException e) { + throw new IllegalArgumentException(e); + } + } + throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); + } + + /** + * Returns the numeric value of a term that is an xsd:integer (or supported + * subtype) constant, and throws an exception for all other cases. 
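As a usage illustration for the extraction helpers added in this patch (a hypothetical snippet, not part of the commit; the example constants are invented), client code can turn result terms into plain Java values without manual casting:

```
import java.net.URI;

import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry;
import org.semanticweb.rulewerk.core.model.api.Term;
import org.semanticweb.rulewerk.core.model.api.Terms;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;

public class TermExtractionSketch {
	public static void main(String[] args) {
		Term name = Expressions.makeDatatypeConstant("Alice", PrefixDeclarationRegistry.XSD_STRING);
		Term age = Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INT);
		Term homepage = Expressions.makeAbstractConstant("https://example.org/alice");

		String javaName = Terms.extractString(name); // "Alice"
		int javaAge = Terms.extractInt(age);         // 42
		URI iri = Terms.extractIri(homepage);        // https://example.org/alice

		System.out.println(javaName + " is " + javaAge + ", see " + iri);
	}
}
```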
+ * + * @param term the term from which the integer is to be extracted + * @return extracted integer + * @throws IllegalArgumentException if the given term is not a constant of an + * integer type, or if the lexical + * representation could not be parsed into a + * Java int + */ + public static int extractInt(Term term) { + if (term.getType() == TermType.DATATYPE_CONSTANT) { + DatatypeConstant datatypeConstant = (DatatypeConstant) term; + if (PrefixDeclarationRegistry.XSD_INTEGER.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_LONG.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_INT.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_SHORT.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_BYTE.equals(datatypeConstant.getDatatype())) + return Integer.parseInt(datatypeConstant.getLexicalValue()); + } + throw new IllegalArgumentException( + "Term " + term.toString() + " is not a datatype constant of a supported integer type."); + } + } From 6dc399f2d7e1724db2d412e3aab4df018c102ec6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 14:45:51 +0200 Subject: [PATCH 0640/1003] Improve names of correctness constants --- .../org/semanticweb/rulewerk/core/reasoner/Correctness.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java index 264616546..e2a00152c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java @@ -33,13 +33,13 @@ public enum Correctness { * Completeness is not guaranteed, but soundness is. For example, query * answering yields sound, but possibly incomplete answers. */ - SOUND_BUT_INCOMPLETE("sound but incomplete"), + SOUND_BUT_INCOMPLETE("sound but possibly incomplete"), /** * Soundness is not guaranteed. For example, query answering may give incorrect * (unsound and incomplete) answers. */ - INCORRECT("incorrect"), + INCORRECT("possibly incorrect"), /** * Correctness is guaranteed. 
For example, query answering yealds are correct From 080f7ab29e5dcdc2af2fa925944a05f0a69510ee Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 14:51:04 +0200 Subject: [PATCH 0641/1003] Structural changes in Parser - Support - and _ in predicate names - Support Rule and PositiveLiteral arguments to (custom) directives - Use PositiveLiteral to capture source declarations; update all handlers to use this - Do not allow data source handlers to have side effects on the KB or parser --- .../rulewerk/parser/DirectiveArgument.java | 140 ++++++++++++++++-- .../rulewerk/parser/ParserConfiguration.java | 36 ++--- .../CsvFileDataSourceDeclarationHandler.java | 12 +- .../RdfFileDataSourceDeclarationHandler.java | 12 +- ...eryResultDataSourceDeclarationHandler.java | 19 +-- .../rulewerk/parser/javacc/JavaCCParser.jj | 36 +++-- .../parser/javacc/JavaCCParserBase.java | 7 +- .../parser/ParserConfigurationTest.java | 2 +- .../parser/RuleParserDataSourceTest.java | 54 +------ 9 files changed, 196 insertions(+), 122 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java index c31270d35..dd47cdffa 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java @@ -24,6 +24,8 @@ import java.util.Optional; import java.util.function.Function; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; /** @@ -39,14 +41,18 @@ private DirectiveArgument() { /** * Apply a function to the contained value. * - * @param stringHandler the function to apply to a string argument - * @param iriHandler the function to apply to an IRI - * @param termHandler the function to apply to a Term + * @param stringHandler the function to apply to a string argument + * @param iriHandler the function to apply to an IRI + * @param termHandler the function to apply to a Term + * @param ruleHandler the function to apply to a Rule + * @param positiveLiteralHandler the function to apply to a Literal * * @return the value returned by the appropriate handler function */ public abstract V apply(Function stringHandler, - Function iriHandler, Function termHandler); + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler); /** * Partially compare two arguments, without comparing the actual values. 
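To make the first bullet of this commit message concrete, here is a hypothetical snippet (not part of the patch) that exercises the relaxed name grammar; after this change, predicate and variable names may contain '-' and '_' as long as they still start with a letter:

```
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.parser.RuleParser;

public class PredicateNameSketch {
	public static void main(String[] args) throws Exception {
		// 'part-of' and 'has_part' were rejected by the previous grammar.
		KnowledgeBase kb = RuleParser.parse(
				"part-of(wheel, car) . has_part(?X, ?Y) :- part-of(?Y, ?X) .");
		kb.getStatements().forEach(System.out::println);
	}
}
```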
@@ -85,7 +91,9 @@ public static DirectiveArgument string(String value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler) { return stringHandler.apply(value); } @@ -98,7 +106,8 @@ public boolean equals(Object other) { } DirectiveArgument otherArgument = (DirectiveArgument) other; - return otherArgument.apply(str -> str.equals(value), iri -> false, term -> false); + return otherArgument.apply(str -> str.equals(value), iri -> false, term -> false, rule -> false, + positiveLiteral -> false); } @Override @@ -119,7 +128,9 @@ public static DirectiveArgument iri(URI value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler) { return iriHandler.apply(value); } @@ -132,7 +143,8 @@ public boolean equals(Object other) { } DirectiveArgument otherArgument = (DirectiveArgument) other; - return otherArgument.apply(str -> false, iri -> iri.equals(value), term -> false); + return otherArgument.apply(str -> false, iri -> iri.equals(value), term -> false, rule -> false, + positiveLiteral -> false); } @Override @@ -153,7 +165,9 @@ public static DirectiveArgument term(Term value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler) { return termHandler.apply(value); } @@ -166,7 +180,8 @@ public boolean equals(Object other) { } DirectiveArgument otherArgument = (DirectiveArgument) other; - return otherArgument.apply(str -> false, iri -> false, term -> term.equals(value)); + return otherArgument.apply(str -> false, iri -> false, term -> term.equals(value), rule -> false, + positiveLiteral -> false); } @Override @@ -176,6 +191,80 @@ public int hashCode() { }; } + /** + * Create an argument containing a Rule. + * + * @param value the Rule value + * + * @return An argument containing the given Rule value + */ + public static DirectiveArgument rule(Rule value) { + return new DirectiveArgument() { + @Override + public V apply(Function stringHandler, + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler) { + return ruleHandler.apply(value); + } + + @Override + public boolean equals(Object other) { + Optional maybeEquals = isEqual(other); + + if (maybeEquals.isPresent()) { + return maybeEquals.get(); + } + + DirectiveArgument otherArgument = (DirectiveArgument) other; + return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> rule.equals(value), + positiveLiteral -> false); + } + + @Override + public int hashCode() { + return 53 * value.hashCode(); + } + }; + } + + /** + * Create an argument containing a PositiveLiteral. 
+ * + * @param value the PositiveLiteral value + * + * @return An argument containing the given PositiveLiteral value + */ + public static DirectiveArgument positiveLiteral(PositiveLiteral value) { + return new DirectiveArgument() { + @Override + public V apply(Function stringHandler, + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler) { + return positiveLiteralHandler.apply(value); + } + + @Override + public boolean equals(Object other) { + Optional maybeEquals = isEqual(other); + + if (maybeEquals.isPresent()) { + return maybeEquals.get(); + } + + DirectiveArgument otherArgument = (DirectiveArgument) other; + return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> false, + positiveLiteral -> positiveLiteral.equals(value)); + } + + @Override + public int hashCode() { + return 59 * value.hashCode(); + } + }; + } + /** * Create an optional from a (possible) string value. * @@ -183,7 +272,8 @@ public int hashCode() { * the argument doesn't contain a string. */ public Optional fromString() { - return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty()); + return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), + value -> Optional.empty()); } /** @@ -193,7 +283,8 @@ public Optional fromString() { * argument doesn't contain a IRI. */ public Optional fromIri() { - return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty()); + return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty(), value -> Optional.empty(), + value -> Optional.empty()); } /** @@ -203,6 +294,29 @@ public Optional fromIri() { * the argument doesn't contain a Term. */ public Optional fromTerm() { - return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of); + return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of, value -> Optional.empty(), + value -> Optional.empty()); + } + + /** + * Create an optional from a (possible) Rule value. + * + * @return An optional containing the contained Rule, or an empty Optional if + * the argument doesn't contain a Rule. + */ + public Optional fromRule() { + return this.apply(value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), Optional::of, + value -> Optional.empty()); + } + + /** + * Create an optional from a (possible) PositiveLiteral value. + * + * @return An optional containing the contained PositiveLiteral, or an empty + * Optional if the argument doesn't contain a PositiveLitreal. 
+ */ + public Optional fromPositiveLiteral() { + return this.apply(value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), + value -> Optional.empty(), Optional::of); } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index f6bc3b9d3..63d788f01 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -28,11 +28,13 @@ import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; +import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** @@ -78,7 +80,8 @@ public class ParserConfiguration { * production of the rules grammar, corresponding to some {@link DataSource} * type. * - * @see the grammar + * @see + * the grammar * * @param name Name of the data source, as it appears in the declaring * directive. @@ -96,30 +99,28 @@ public ParserConfiguration registerDataSource(final String name, final DataSourc } /** - * Parse the source-specific part of a Data Source declaration. + * Parse the source-specific part of a data source declaration. * * This is called by the parser to construct a {@link DataSourceDeclaration}. It * is responsible for instantiating an appropriate {@link DataSource} type. * - * @param name Name of the data source. - * @param args arguments given in the data source declaration. - * @param subParserFactory a {@link SubParserFactory} instance that creates - * parser with the same context as the current parser. + * @param declaration literal that specifies the type and parameters for this + * data source declarations * - * @throws ParsingException when the declaration is invalid, e.g., if the Data - * Source is not known. + * @throws ParsingException when the declaration is invalid, e.g., if the data + * source is not known. * - * @return the Data Source instance. + * @return the data source instance. 
*/ - public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final String name, - final List args, final SubParserFactory subParserFactory) throws ParsingException { - final DataSourceDeclarationHandler handler = this.dataSources.get(name); + public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(PositiveLiteral declaration) + throws ParsingException { + final DataSourceDeclarationHandler handler = this.dataSources.get(declaration.getPredicate().getName()); if (handler == null) { - throw new ParsingException("Data source \"" + name + "\" is not known."); + throw new ParsingException("Data source \"" + declaration.getPredicate().getName() + "\" is not known."); } - return handler.handleDirective(args, subParserFactory); + return handler.handleDataSourceDeclaration(declaration.getArguments()); } /** @@ -286,7 +287,8 @@ public ParserConfiguration allowNamedNulls() { } /** - * Disallow parsing of {@link org.semanticweb.rulewerk.core.model.api.NamedNull}. + * Disallow parsing of + * {@link org.semanticweb.rulewerk.core.model.api.NamedNull}. * * @return this */ @@ -295,8 +297,8 @@ public ParserConfiguration disallowNamedNulls() { } /** - * Whether parsing of {@link org.semanticweb.rulewerk.core.model.api.NamedNull} is - * allowed. + * Whether parsing of {@link org.semanticweb.rulewerk.core.model.api.NamedNull} + * is allowed. * * @return true iff parsing of NamedNulls is allowed. */ diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java index 67a66c9c4..185f073ca 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -24,12 +24,9 @@ import java.util.List; import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.parser.DataSourceDeclarationHandler; -import org.semanticweb.rulewerk.parser.DirectiveArgument; -import org.semanticweb.rulewerk.parser.DirectiveHandler; import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Handler for parsing {@link CsvFileDataSource} declarations @@ -38,10 +35,9 @@ */ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) - throws ParsingException { - DirectiveHandler.validateNumberOfArguments(arguments, 1); - String fileName = DirectiveHandler.validateStringArgument(arguments.get(0), "source file"); + public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); + String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "CSV file name"); try { return new CsvFileDataSource(fileName); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java index 259c26759..36a3738c4 100644 --- 
a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -24,12 +24,9 @@ import java.util.List; import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.parser.DataSourceDeclarationHandler; -import org.semanticweb.rulewerk.parser.DirectiveArgument; -import org.semanticweb.rulewerk.parser.DirectiveHandler; import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Handler for parsing {@link RdfFileDataSource} declarations @@ -38,10 +35,9 @@ */ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) - throws ParsingException { - DirectiveHandler.validateNumberOfArguments(arguments, 1); - String fileName = DirectiveHandler.validateStringArgument(arguments.get(0), "source file"); + public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); + String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "RDF file name"); try { return new RdfFileDataSource(fileName); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index e91632ac9..3a405d83c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -24,12 +24,9 @@ import java.util.List; import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.rulewerk.parser.DataSourceDeclarationHandler; -import org.semanticweb.rulewerk.parser.DirectiveArgument; -import org.semanticweb.rulewerk.parser.DirectiveHandler; import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Handler for parsing {@link SparqlQueryResultDataSource} declarations @@ -37,13 +34,13 @@ * @author Maximilian Marx */ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler { - @Override - public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) - throws ParsingException { - DirectiveHandler.validateNumberOfArguments(arguments, 3); - URL endpoint = DirectiveHandler.validateUrlArgument(arguments.get(0), "SPARQL endpoint"); - String variables = DirectiveHandler.validateStringArgument(arguments.get(1), "variables list"); - String query = DirectiveHandler.validateStringArgument(arguments.get(2), "query fragment"); + + public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + DataSourceDeclarationHandler.validateNumberOfArguments(terms, 3); + URL endpoint = 
DataSourceDeclarationHandler.validateUrlArgument(terms.get(0), "SPARQL endpoint URL"); + String variables = DataSourceDeclarationHandler.validateStringArgument(terms.get(1), + "SPARQL query variable list"); + String query = DataSourceDeclarationHandler.validateStringArgument(terms.get(2), "SPARQL query pattern"); return new SparqlQueryResultDataSource(endpoint, variables, query); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index 7a80fd52c..e932e160e 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -34,17 +34,14 @@ options PARSER_BEGIN(JavaCCParser) package org.semanticweb.rulewerk.parser.javacc; -import java.io.File; -import java.io.InputStream; -import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.List; -import java.util.Deque; import java.util.ArrayList; -import java.util.ArrayDeque; import java.util.LinkedList; +import java.util.ArrayDeque; +import java.util.Deque; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.DirectiveArgument; @@ -62,9 +59,6 @@ import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.rulewerk.core.model.implementation.Expressions; @@ -147,12 +141,11 @@ void source() throws PrefixDeclarationException : { } DataSource dataSource() throws PrefixDeclarationException : { - Token sourceName; + PositiveLiteral positiveLiteral; List< DirectiveArgument > arguments; } { - ( sourceName = < ARGUMENT_NAME > - | sourceName = < VARORPREDNAME > ) < LPAREN > arguments = Arguments() < RPAREN > { - return parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); + positiveLiteral = positiveLiteral(FormulaContext.HEAD) { + return parseDataSourceSpecificPartOfDataSourceDeclaration(positiveLiteral); } } @@ -165,6 +158,16 @@ KnowledgeBase directive() throws PrefixDeclarationException : { } } +/*TODO List< DirectiveArgument > command() throws PrefixDeclarationException : { + Token name; + List< DirectiveArgument > arguments; +} { + name = < CUSTOM_DIRECTIVE > arguments = Arguments() < DOT > { + arguments.add(0, DirectiveArgument.string(name.image)); + return arguments; + } +}*/ + void statement() throws PrefixDeclarationException : { Statement statement; KnowledgeBase knowledgeBase; @@ -432,10 +435,14 @@ String String() : { LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : { DirectiveArgument argument; String str; + Rule rule; + PositiveLiteral positiveLiteral; Term t; LinkedList< DirectiveArgument > rest = new LinkedList< DirectiveArgument >(); } { - ( LOOKAHEAD(String()) str = String() { argument = DirectiveArgument.string(str); } + ( LOOKAHEAD(rule()) rule = rule() { argument = DirectiveArgument.rule(rule); } + | LOOKAHEAD(positiveLiteral(FormulaContext.HEAD)) positiveLiteral 
= positiveLiteral(FormulaContext.HEAD) { argument = DirectiveArgument.positiveLiteral(positiveLiteral); } + | LOOKAHEAD(String()) str = String() { argument = DirectiveArgument.string(str); } | LOOKAHEAD(absoluteIri()) str = absoluteIri() { URI url; try { @@ -479,9 +486,10 @@ MORE : { } < DEFAULT, BODY, TERM, DIRECTIVE_ARGUMENTS > TOKEN : { - < VARORPREDNAME : < A2Z> (< A2ZN >)* > + < VARORPREDNAME : < A2Z> (< A2ZND >)* > | < #A2Z : [ "a"-"z", "A"-"Z" ] > | < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > + | < #A2ZND : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > | < PNAME_LN : (< PN_PREFIX >)? ":" < PN_LOCAL > > | < PNAME_NS : < PN_PREFIX > ":" > | < #PN_CHARS_BASE : [ "a"-"z", "A"-"Z", "\u00c0"-"\u00d6", diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 54126cd0f..8fd4e8c84 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -28,6 +28,7 @@ import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Statement; @@ -330,11 +331,9 @@ public PrefixDeclarationRegistry getPrefixDeclarationRegistry() { return this.prefixDeclarationRegistry; } - DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String syntacticForm, - List arguments, SubParserFactory subParserFactory) throws ParseException { + DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(PositiveLiteral declaration) throws ParseException { try { - return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(syntacticForm, arguments, - subParserFactory); + return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(declaration); } catch (ParsingException e) { throw makeParseExceptionWithCause( "Failed while trying to parse the source-specific part of a data source declaration", e); diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java index cae03b2c7..c9d513f6e 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java @@ -28,9 +28,9 @@ import org.junit.Test; import org.mockito.Mock; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.parser.DataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.DatatypeConstantHandler; import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java 
index e72ae9dfb..1a376bc3e 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -31,21 +31,20 @@ import org.junit.Test; import org.mockito.ArgumentMatchers; -import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.rulewerk.parser.DataSourceDeclarationHandler; -import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; +import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; public class RuleParserDataSourceTest { private static final String EXAMPLE_RDF_FILE_PATH = "src/main/data/input/example.nt.gz"; @@ -140,16 +139,15 @@ public void testCustomDataSource() throws ParsingException { DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDirective(ArgumentMatchers.>any(), - ArgumentMatchers.any()); + doReturn(source).when(handler).handleDataSourceDeclaration(ArgumentMatchers.>any()); String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; - List expectedArguments = Arrays.asList(DirectiveArgument.string("hello"), - DirectiveArgument.string("world")); + List expectedArguments = Arrays.asList( + Expressions.makeDatatypeConstant("hello", PrefixDeclarationRegistry.XSD_STRING), + Expressions.makeDatatypeConstant("world", PrefixDeclarationRegistry.XSD_STRING)); RuleParser.parse(input, parserConfiguration); - verify(handler).handleDirective(ArgumentMatchers.eq(expectedArguments), - ArgumentMatchers.any()); + verify(handler).handleDataSourceDeclaration(ArgumentMatchers.eq(expectedArguments)); } @Test @@ -195,40 +193,4 @@ public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws Pa RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); } - class DuplicatingDataSourceDeclarationHandler implements DataSourceDeclarationHandler { - public DataSource handleDirective(List arguments, SubParserFactory subParserFactory) - throws ParsingException { - CsvFileDataSource source; - try { - source = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); - } catch (IOException e) { - throw new ParsingException(e); - } - - KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); - ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); - 
RuleParser.parseInto(knowledgeBase, "@source q[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") .", - parserConfiguration); - - return source; - } - } - - @Test - public void parseInto_mockDataSourceWithBase_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - parserConfiguration.registerDataSource("mock-source", new DuplicatingDataSourceDeclarationHandler()); - String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; - KnowledgeBase knowledgeBase = new KnowledgeBase(); - RuleParser.parseInto(knowledgeBase, input, parserConfiguration, "https://example.org"); - assertEquals(2, knowledgeBase.getStatements().size()); - } - - @Test(expected = ParsingException.class) - public void parseDataSourceDeclaration_unexpectedlyAddsTwoDatasources_throws() throws ParsingException { - ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - parserConfiguration.registerDataSource("mock-source", new DuplicatingDataSourceDeclarationHandler()); - String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; - RuleParser.parseDataSourceDeclaration(input, parserConfiguration); - } } From 3510326c0961c2adc15a002e807af98fc8623544 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 23:14:00 +0200 Subject: [PATCH 0642/1003] Support commands in parser - Add Argument (formerly DirectiveArgument) and Command to core api - Support parsing commands with zero of more arguments --- .../rulewerk/core/model/api/Argument.java | 44 ++++----- .../rulewerk/core/model/api/Command.java | 69 +++++++++++++ .../rulewerk/core/model/ArgumentTest.java | 23 ++--- .../parser/DataSourceDeclarationHandler.java | 32 ------- .../rulewerk/parser/DirectiveHandler.java | 15 +-- .../rulewerk/parser/ParserConfiguration.java | 3 +- .../rulewerk/parser/RuleParser.java | 10 ++ .../DataSourceDeclarationHandler.java | 96 +++++++++++++++++++ .../ImportFileDirectiveHandler.java | 4 +- .../ImportFileRelativeDirectiveHandler.java | 4 +- .../rulewerk/parser/javacc/JavaCCParser.jj | 61 +++++++----- .../parser/javacc/JavaCCParserBase.java | 4 +- .../rulewerk/parser/CommandParserTest.java | 23 +++++ .../rulewerk/parser/DirectiveHandlerTest.java | 11 ++- 14 files changed, 289 insertions(+), 110 deletions(-) rename rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java => rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java (88%) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java rename rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java => rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java (73%) delete mode 100644 rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java create mode 100644 rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java create mode 100644 rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java similarity index 88% rename from rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java index dd47cdffa..7fe488150 100644 --- 
a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.parser; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -24,18 +24,14 @@ import java.util.Optional; import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; - /** * A tagged union representing the possible types allowed to appear as arguments - * in directives. + * in commands and parser directives. * * @author Maximilian Marx */ -public abstract class DirectiveArgument { - private DirectiveArgument() { +public abstract class Argument { + private Argument() { } /** @@ -73,7 +69,7 @@ protected Optional isEqual(Object other) { return Optional.of(true); } - if (!(other instanceof DirectiveArgument)) { + if (!(other instanceof Argument)) { return Optional.of(false); } @@ -87,8 +83,8 @@ protected Optional isEqual(Object other) { * * @return An argument containing the given string value */ - public static DirectiveArgument string(String value) { - return new DirectiveArgument() { + public static Argument string(String value) { + return new Argument() { @Override public V apply(Function stringHandler, Function iriHandler, Function termHandler, @@ -105,7 +101,7 @@ public boolean equals(Object other) { return maybeEquals.get(); } - DirectiveArgument otherArgument = (DirectiveArgument) other; + Argument otherArgument = (Argument) other; return otherArgument.apply(str -> str.equals(value), iri -> false, term -> false, rule -> false, positiveLiteral -> false); } @@ -124,8 +120,8 @@ public int hashCode() { * * @return An argument containing the given IRI value */ - public static DirectiveArgument iri(URI value) { - return new DirectiveArgument() { + public static Argument iri(URI value) { + return new Argument() { @Override public V apply(Function stringHandler, Function iriHandler, Function termHandler, @@ -142,7 +138,7 @@ public boolean equals(Object other) { return maybeEquals.get(); } - DirectiveArgument otherArgument = (DirectiveArgument) other; + Argument otherArgument = (Argument) other; return otherArgument.apply(str -> false, iri -> iri.equals(value), term -> false, rule -> false, positiveLiteral -> false); } @@ -161,8 +157,8 @@ public int hashCode() { * * @return An argument containing the given Term value */ - public static DirectiveArgument term(Term value) { - return new DirectiveArgument() { + public static Argument term(Term value) { + return new Argument() { @Override public V apply(Function stringHandler, Function iriHandler, Function termHandler, @@ -179,7 +175,7 @@ public boolean equals(Object other) { return maybeEquals.get(); } - DirectiveArgument otherArgument = (DirectiveArgument) other; + Argument otherArgument = (Argument) other; return otherArgument.apply(str -> false, iri -> false, term -> term.equals(value), rule -> false, positiveLiteral -> false); } @@ -198,8 +194,8 @@ public int hashCode() { * * @return An argument containing the given Rule value */ - public static DirectiveArgument rule(Rule value) { - return new DirectiveArgument() { + public static Argument rule(Rule value) { + return new Argument() { @Override public V apply(Function stringHandler, Function iriHandler, Function termHandler, @@ -216,7 +212,7 @@ public boolean equals(Object other) { return maybeEquals.get(); } - 
DirectiveArgument otherArgument = (DirectiveArgument) other; + Argument otherArgument = (Argument) other; return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> rule.equals(value), positiveLiteral -> false); } @@ -235,8 +231,8 @@ public int hashCode() { * * @return An argument containing the given PositiveLiteral value */ - public static DirectiveArgument positiveLiteral(PositiveLiteral value) { - return new DirectiveArgument() { + public static Argument positiveLiteral(PositiveLiteral value) { + return new Argument() { @Override public V apply(Function stringHandler, Function iriHandler, Function termHandler, @@ -253,7 +249,7 @@ public boolean equals(Object other) { return maybeEquals.get(); } - DirectiveArgument otherArgument = (DirectiveArgument) other; + Argument otherArgument = (Argument) other; return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> false, positiveLiteral -> positiveLiteral.equals(value)); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java new file mode 100644 index 000000000..855652158 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -0,0 +1,69 @@ +package org.semanticweb.rulewerk.core.model.api; + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + +/** + * Class for representing a generic command that can be executed. + * + * @author Markus Kroetzsch + * + */ +public class Command implements Entity { + + final String name; + final List arguments; + + /** + * Constructor + * + * @param name String name of the command + * @param arguments list of arguments of the command + */ + public Command(String name, List arguments) { + this.name = name; + this.arguments = arguments; + } + + /** + * Returns the command name. + * + * @return + */ + public String getName() { + return name; + } + + /** + * Returns the command arguments. 
+ * + * @return + */ + public List getArguments() { + return arguments; + } + + @Override + public String getSyntacticRepresentation() { + StringBuilder result = new StringBuilder("@"); + result.append(name); + for (Argument argument : arguments) { + result.append(" "); + if (argument.fromRule().isPresent()) { + Rule rule = argument.fromRule().get(); + result.append(Serializer.getString(rule.getHead())).append(Serializer.RULE_SEPARATOR) + .append(Serializer.getString(rule.getBody())); + } else if (argument.fromPositiveLiteral().isPresent()) { + result.append(argument.fromPositiveLiteral().get().getSyntacticRepresentation()); + } else if (argument.fromString().isPresent()) { + result.append(Serializer.getString(argument.fromString().get())); + } else { + throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); + } + } + result.append(Serializer.STATEMENT_SEPARATOR); + return result.toString(); + } + +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java similarity index 73% rename from rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java index 2ce1af622..bcec475d1 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.parser; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -24,17 +24,18 @@ import java.net.URI; import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -public class DirectiveArgumentTest { +public class ArgumentTest { private static final String STRING = "src/test/resources/facts.rls"; private static final URI IRI = URI.create("https://example.org"); private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); - private static final DirectiveArgument STRING_ARGUMENT = DirectiveArgument.string(STRING); - private static final DirectiveArgument IRI_ARGUMENT = DirectiveArgument.iri(IRI); - private static final DirectiveArgument TERM_ARGUMENT = DirectiveArgument.term(TERM); + private static final Argument STRING_ARGUMENT = Argument.string(STRING); + private static final Argument IRI_ARGUMENT = Argument.iri(IRI); + private static final Argument TERM_ARGUMENT = Argument.term(TERM); @Test public void equals_null_returnsFalse() { @@ -52,17 +53,17 @@ public void equals_self_returnsTrue() { @Test public void equals_equal_returnsTrue() { - assertTrue(STRING_ARGUMENT.equals(DirectiveArgument.string(STRING))); - assertTrue(IRI_ARGUMENT.equals(DirectiveArgument.iri(IRI))); - assertTrue(TERM_ARGUMENT.equals(DirectiveArgument.term(TERM))); + assertTrue(STRING_ARGUMENT.equals(Argument.string(STRING))); + assertTrue(IRI_ARGUMENT.equals(Argument.iri(IRI))); + assertTrue(TERM_ARGUMENT.equals(Argument.term(TERM))); } @Test public void equals_notEqualButSameType_returnsFalse() { - assertFalse(STRING_ARGUMENT.equals(DirectiveArgument.string(STRING + "test"))); - assertFalse(IRI_ARGUMENT.equals(DirectiveArgument.iri(URI.create("https://example.com")))); + assertFalse(STRING_ARGUMENT.equals(Argument.string(STRING + "test"))); + 
assertFalse(IRI_ARGUMENT.equals(Argument.iri(URI.create("https://example.com")))); assertFalse(TERM_ARGUMENT - .equals(DirectiveArgument.term(Expressions.makeDatatypeConstant(STRING, "https://example.com")))); + .equals(Argument.term(Expressions.makeDatatypeConstant(STRING, "https://example.com")))); } @Test diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java deleted file mode 100644 index e17ae1d1e..000000000 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.semanticweb.rulewerk.parser; - -/*- - * #%L - * Rulewerk Parser - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.rulewerk.core.model.api.DataSource; - -/** - * Handler for parsing a custom Data Source declaration. - * - * @author Maximilian Marx - */ -@FunctionalInterface -public interface DataSourceDeclarationHandler extends DirectiveHandler { -} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 76c75e716..1be48eba7 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -27,6 +27,7 @@ import java.nio.file.InvalidPathException; import java.util.List; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; @@ -53,7 +54,7 @@ public interface DirectiveHandler { * directive, or the number of arguments is invalid. * @return a {@code T} instance corresponding to the given arguments. */ - public T handleDirective(List arguments, final SubParserFactory subParserFactory) + public T handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException; /** @@ -65,7 +66,7 @@ public T handleDirective(List arguments, final SubParserFacto * @throws ParsingException when the given number of Arguments is invalid for * the Directive statement. */ - public static void validateNumberOfArguments(final List arguments, final int number) + public static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { if (arguments.size() != number) { throw new ParsingException( @@ -84,7 +85,7 @@ public static void validateNumberOfArguments(final List argum * * @return the contained {@link String}. 
*/ - public static String validateStringArgument(final DirectiveArgument argument, final String description) + public static String validateStringArgument(final Argument argument, final String description) throws ParsingException { return argument.fromString() .orElseThrow(() -> new ParsingException("description \"" + argument + "\" is not a string.")); @@ -101,7 +102,7 @@ public static String validateStringArgument(final DirectiveArgument argument, fi * * @return the File corresponding to the contained file path. */ - public static File validateFilenameArgument(final DirectiveArgument argument, final String description) + public static File validateFilenameArgument(final Argument argument, final String description) throws ParsingException { String fileName = DirectiveHandler.validateStringArgument(argument, description); File file = new File(fileName); @@ -126,7 +127,7 @@ public static File validateFilenameArgument(final DirectiveArgument argument, fi * * @return the contained IRI. */ - public static URI validateIriArgument(final DirectiveArgument argument, final String description) + public static URI validateIriArgument(final Argument argument, final String description) throws ParsingException { return argument.fromIri() .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not an IRI.")); @@ -143,7 +144,7 @@ public static URI validateIriArgument(final DirectiveArgument argument, final St * * @return the {@link URL} corresponding to the contained IRI. */ - public static URL validateUrlArgument(final DirectiveArgument argument, final String description) + public static URL validateUrlArgument(final Argument argument, final String description) throws ParsingException { URI iri = DirectiveHandler.validateIriArgument(argument, description); try { @@ -164,7 +165,7 @@ public static URL validateUrlArgument(final DirectiveArgument argument, final St * * @return the contained {@link Term}. 
*/ - public static Term validateTermArgument(final DirectiveArgument argument, final String description) + public static Term validateTermArgument(final Argument argument, final String description) throws ParsingException { return argument.fromTerm() .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not a Term.")); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 63d788f01..cec1ad19e 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; @@ -253,7 +254,7 @@ public ParserConfiguration registerDirective(String name, DirectiveHandler arguments, + public KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) throws ParsingException { final DirectiveHandler handler = this.directives.get(name); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index 5ffdcf281..124bb5381 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -25,6 +25,7 @@ import java.util.List; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Entity; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -252,6 +253,15 @@ public static DataSourceDeclaration parseDataSourceDeclaration(final String inpu return parseDataSourceDeclaration(input, null); } + public static Command parseCommand(final String input, ParserConfiguration parserConfiguration) + throws ParsingException { + return parseSyntaxFragment(input, JavaCCParser::command, "command", parserConfiguration); + } + + public static Command parseCommand(final String input) throws ParsingException { + return parseCommand(input, null); + } + static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException { try { parser.parse(); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java new file mode 100644 index 000000000..88801c331 --- /dev/null +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java @@ -0,0 +1,96 @@ +package org.semanticweb.rulewerk.parser.datasources; + +import java.net.URL; +import java.util.List; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * Handler for interpreting the arguments of a custom Data Source declaration. + * + * @author Markus Kroetzsch + */ +@FunctionalInterface +public interface DataSourceDeclarationHandler { + + DataSource handleDataSourceDeclaration(List terms) throws ParsingException; + + /** + * Validate the provided number of arguments to the source declaration. + * + * @param terms arguments given to the source declaration. + * @param number expected number of arguments + * + * @throws ParsingException when the number of terms does not match expectations + */ + public static void validateNumberOfArguments(final List terms, final int number) throws ParsingException { + if (terms.size() != number) { + throw new ParsingException( + "Invalid number of arguments " + terms.size() + " for @source declaration, expected " + number); + } + } + + /** + * Returns the string content of the given term, or reports an error if the term + * is not an xsd:string. + * + * @param term the term to be processed + * @param parameterName the string name of the parameter to be used in error + * messages + * @return the extracted string + * @throws ParsingException thrown if the term was not a String + */ + public static String validateStringArgument(Term term, String parameterName) throws ParsingException { + try { + return Terms.extractString(term); + } catch (IllegalArgumentException e) { + throw makeParameterParsingException(term, parameterName, e); + } + } + + /** + * Returns the URL represented by the given term, or reports an error if no + * valid URL could be extracted from the term. + * + * @param term the term to be processed + * @param parameterName the string name of the parameter to be used in error + * messages + * @return the extracted URL + * @throws ParsingException thrown if the term was not a URL + */ + public static URL validateUrlArgument(Term term, String parameterName) throws ParsingException { + try { + return Terms.extractUrl(term); + } catch (IllegalArgumentException e) { + throw makeParameterParsingException(term, parameterName, e); + } + } + + static ParsingException makeParameterParsingException(Term term, String parameterName, Throwable cause) { + return new ParsingException("Expected " + parameterName + " to be a string. 
Found " + term.toString() + ".", + cause); + } +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index ae227a9d1..2580a2b56 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -26,8 +26,8 @@ import java.util.List; import org.semanticweb.rulewerk.core.exceptions.RulewerkException; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.DirectiveHandler; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; @@ -42,7 +42,7 @@ public class ImportFileDirectiveHandler implements DirectiveHandler { @Override - public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) + public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index 7de06a3ea..f70831417 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -26,9 +26,9 @@ import java.util.List; import org.semanticweb.rulewerk.core.exceptions.RulewerkException; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.DirectiveHandler; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; @@ -42,7 +42,7 @@ */ public class ImportFileRelativeDirectiveHandler implements DirectiveHandler { @Override - public KnowledgeBase handleDirective(List arguments, SubParserFactory subParserFactory) + public KnowledgeBase handleDirective(List arguments, SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); PrefixDeclarationRegistry prefixDeclarationRegistry = getPrefixDeclarationRegistry(subParserFactory); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index e932e160e..59e2c3f85 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -44,7 +44,6 @@ import java.util.ArrayDeque; import java.util.Deque; import org.semanticweb.rulewerk.parser.ParsingException; -import 
org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; @@ -60,6 +59,9 @@ import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.script.Argument; +import org.semanticweb.rulewerk.core.script.Command; + import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class JavaCCParser extends JavaCCParserBase @@ -142,7 +144,7 @@ void source() throws PrefixDeclarationException : { DataSource dataSource() throws PrefixDeclarationException : { PositiveLiteral positiveLiteral; - List< DirectiveArgument > arguments; + List< Argument > arguments; } { positiveLiteral = positiveLiteral(FormulaContext.HEAD) { return parseDataSourceSpecificPartOfDataSourceDeclaration(positiveLiteral); @@ -151,22 +153,25 @@ DataSource dataSource() throws PrefixDeclarationException : { KnowledgeBase directive() throws PrefixDeclarationException : { Token name; - List< DirectiveArgument > arguments; + List< Argument > arguments; } { name = < CUSTOM_DIRECTIVE > arguments = Arguments() < DOT > { return parseDirectiveStatement(name.image, arguments, getSubParserFactory()); } } -/*TODO List< DirectiveArgument > command() throws PrefixDeclarationException : { +Command command() throws PrefixDeclarationException : { Token name; - List< DirectiveArgument > arguments; + List< Argument > arguments; } { - name = < CUSTOM_DIRECTIVE > arguments = Arguments() < DOT > { - arguments.add(0, DirectiveArgument.string(name.image)); - return arguments; - } -}*/ + name = < CUSTOM_DIRECTIVE > + ( arguments = Arguments() < DOT > { + return new Command(name.image,arguments); + } | + < DOT > { + return new Command(name.image, new LinkedList< Argument >()); + } ) +} void statement() throws PrefixDeclarationException : { Statement statement; @@ -180,10 +185,18 @@ void statement() throws PrefixDeclarationException : { } Rule rule() throws PrefixDeclarationException : { + Rule rule; +} { + rule = ruleNoDot() < DOT > { + return rule; + } +} + +Rule ruleNoDot() throws PrefixDeclarationException : { List < PositiveLiteral > head; List < Literal > body; } { - head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > { + head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) { // check that the intersection between headExiVars and BodyVars is empty for (String variable : headExiVars) { if (bodyVars.contains(variable)) @@ -432,17 +445,17 @@ String String() : { ) { return unescapeStr(t.image, t.beginLine, t.beginColumn); } } -LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : { - DirectiveArgument argument; +LinkedList< Argument > Arguments() throws PrefixDeclarationException : { + Argument argument; String str; Rule rule; PositiveLiteral positiveLiteral; Term t; - LinkedList< DirectiveArgument > rest = new LinkedList< DirectiveArgument >(); + LinkedList< Argument > rest = new LinkedList< Argument >(); } { - ( LOOKAHEAD(rule()) rule = rule() { argument = DirectiveArgument.rule(rule); } - | LOOKAHEAD(positiveLiteral(FormulaContext.HEAD)) positiveLiteral = positiveLiteral(FormulaContext.HEAD) { argument = DirectiveArgument.positiveLiteral(positiveLiteral); } - | LOOKAHEAD(String()) str = String() { argument 
= DirectiveArgument.string(str); } + ( LOOKAHEAD(ruleNoDot()) rule = ruleNoDot() { argument = Argument.rule(rule); } + | LOOKAHEAD(positiveLiteral(FormulaContext.HEAD)) positiveLiteral = positiveLiteral(FormulaContext.HEAD) { argument = Argument.positiveLiteral(positiveLiteral); } + | LOOKAHEAD(String()) str = String() { argument = Argument.string(str); } | LOOKAHEAD(absoluteIri()) str = absoluteIri() { URI url; try { @@ -450,10 +463,10 @@ LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : } catch (URISyntaxException e) { throw makeParseExceptionWithCause("Error parsing IRIhandler: " + e.getMessage(), e); } - argument = DirectiveArgument.iri(url); + argument = Argument.iri(url); } - | t = term(FormulaContext.HEAD) { argument = DirectiveArgument.term(t); } - ) [< COMMA > rest = Arguments()] { + | t = term(FormulaContext.HEAD) { argument = Argument.term(t); } + ) [rest = Arguments()] { rest.addFirst(argument); return rest; } @@ -539,11 +552,11 @@ MORE : { | < COLON : ":" > } -TOKEN : { - < ARROW : ":-" > : BODY +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > TOKEN : { + < ARROW : ":-" > } -< DEFAULT, BODY > TOKEN : { +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > TOKEN : { < TILDE : "~" > } @@ -575,7 +588,7 @@ TOKEN : { | < ARGUMENT_NAME : < DIRECTIVENAME > > } -< TERM > TOKEN : { +< TERM, DIRECTIVE_ARGUMENTS > TOKEN : { < UNIVAR : "?" < VARORPREDNAME > > | < EXIVAR : "!" < VARORPREDNAME > > | < LANGTAG : "@" ( < A2Z > )+ ( "-" ( < A2ZN > )+ )? > { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 8fd4e8c84..9ade274bf 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -25,6 +25,7 @@ import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.NamedNull; @@ -38,7 +39,6 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.LocalPrefixDeclarationRegistry; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; @@ -345,7 +345,7 @@ Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syn return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); } - KnowledgeBase parseDirectiveStatement(String name, List arguments, + KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) throws ParseException { try { return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java new file mode 100644 index 000000000..93c527d1d --- /dev/null +++ 
b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java @@ -0,0 +1,23 @@ +package org.semanticweb.rulewerk.parser; + +import static org.junit.Assert.*; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; + +public class CommandParserTest { + + @Test + public void parseCommand() throws ParsingException { + String input = "@query p(?X, a):- q(?X) \"string\" abcd p(a) ."; + Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); + assertEquals("query", command.getName()); + assertEquals(5, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromRule().isPresent()); + assertTrue(command.getArguments().get(1).fromString().isPresent()); + assertTrue(command.getArguments().get(2).fromTerm().isPresent()); + assertTrue(command.getArguments().get(3).fromPositiveLiteral().isPresent()); + assertTrue(command.getArguments().get(4).fromIri().isPresent()); + } +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java index 02f58e5ad..725d54dc1 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -26,6 +26,7 @@ import java.net.URI; import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; @@ -34,9 +35,9 @@ public class DirectiveHandlerTest { private static final URI IRI = URI.create("https://example.org"); private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); - private static final DirectiveArgument STRING_ARGUMENT = DirectiveArgument.string(STRING); - private static final DirectiveArgument IRI_ARGUMENT = DirectiveArgument.iri(IRI); - private static final DirectiveArgument TERM_ARGUMENT = DirectiveArgument.term(TERM); + private static final Argument STRING_ARGUMENT = Argument.string(STRING); + private static final Argument IRI_ARGUMENT = Argument.iri(IRI); + private static final Argument TERM_ARGUMENT = Argument.term(TERM); @Test public void validateStringArgument_stringArgument_succeeds() throws ParsingException { @@ -90,7 +91,7 @@ public void validateFilenameArgument_filename_succeeds() throws ParsingException @Test public void validateFilenameArgument_invalidFilename_throws() throws ParsingException { - DirectiveHandler.validateFilenameArgument(DirectiveArgument.string(STRING + "-nonexistant"), + DirectiveHandler.validateFilenameArgument(Argument.string(STRING + "-nonexistant"), "filename argument"); } @@ -101,7 +102,7 @@ public void validateUrlArgument_url_succeeds() throws ParsingException, Malforme @Test(expected = ParsingException.class) public void validateUrlArgument_invalidUrl_throws() throws ParsingException { - DirectiveHandler.validateUrlArgument(DirectiveArgument.iri(URI.create("example://test")), "url argument"); + DirectiveHandler.validateUrlArgument(Argument.iri(URI.create("example://test")), "url argument"); } } From c472c46e8a812778eab95d81a57e6d0937cda51c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 23:47:37 +0200 Subject: [PATCH 0643/1003] Utility class to measure times --- .../rulewerk/core/reasoner/Timer.java | 545 
++++++++++++++++++ 1 file changed, 545 insertions(+) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java new file mode 100644 index 000000000..9b555ede2 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java @@ -0,0 +1,545 @@ +package org.semanticweb.rulewerk.core.reasoner; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.lang.management.ManagementFactory; +import java.lang.management.ThreadMXBean; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Class for keeping CPU and system times. The class has a number of features + * that can be used to measure and aggregate times across many threads and many + * methods. + * + * @author Markus Kroetzsch + */ +public class Timer { + + private static Logger LOGGER = LoggerFactory.getLogger(Timer.class); + + /** Flag for indicating that no times should be taken (just count runs). */ + public static final int RECORD_NONE = 0x00000000; + /** Flag for indicating that CPU time should be taken. */ + public static final int RECORD_CPUTIME = 0x00000001; + /** Flag for indicating that wall clock time should be taken. */ + public static final int RECORD_WALLTIME = 0x00000002; + /** Flag for indicating that all supported times should be taken. */ + public static final int RECORD_ALL = RECORD_CPUTIME | RECORD_WALLTIME; + + static final ThreadMXBean tmxb = ManagementFactory.getThreadMXBean(); + + static final ConcurrentHashMap registeredTimers = new ConcurrentHashMap(); + + protected final String name; + protected final long threadId; + protected final int todoFlags; + + protected long currentStartCpuTime = -1; + protected long currentStartWallTime = -1; + protected boolean isRunning = false; + protected long totalCpuTime = 0; + protected long totalWallTime = 0; + protected int measurements = 0; + protected int threadCount = 0; + + /** + * Constructor. Every timer is identified by three things: a string name, an + * integer for flagging its tasks (todos), and a thread id (long). + * + * Tasks can be flagged by a disjunction of constants like RECORD_CPUTIME and + * RECORD_WALLTIME. Only times for which an according flag is set will be + * recorded. + * + * The thread id can be the actual id of the thread that is measured, or 0 + * (invalid id) to not assign the timer to any thread. In this case, no CPU time + * measurement is possible since Java does not allow us to measure the total CPU + * time across all threads. 
+ * + * @param name + * @param todoFlags + * @param threadId + */ + public Timer(String name, int todoFlags, long threadId) { + this.name = name; + this.todoFlags = todoFlags; + this.threadId = threadId; + + if (!tmxb.isThreadCpuTimeEnabled()) { + tmxb.setThreadCpuTimeEnabled(true); + } + } + + public Timer(String name) { + this(name, RECORD_ALL, Thread.currentThread().getId()); + } + + /** + * + * @param name + * @param todoFlags + * @return a new {@link Timer} for the current thread + */ + static public Timer getTimerForCurrentThread(String name, int todoFlags) { + return new Timer(name, todoFlags, Thread.currentThread().getId()); + } + + /** + * Get the total recorded CPU time in nanoseconds. + * + * @return recorded CPU time in nanoseconds + */ + public long getTotalCpuTime() { + return totalCpuTime; + } + + public long getAvgCpuTime() { + return totalCpuTime > 0 && measurements > 0 ? totalCpuTime / measurements : -1; + } + + /** + * Get the string name of the timer. + * + * @return string name + */ + public String getName() { + return name; + } + + /** + * Get the ID of the thread for which this timer was created. + * + * @return thread ID + */ + public long getThreadId() { + return threadId; + } + + /** + * Get the total recorded wall clock time in nanoseconds. + * + * @return recorded wall time in nanoseconds + */ + public long getTotalWallTime() { + return totalWallTime; + } + + public long getAvgWallTime() { + return totalWallTime > 0 && measurements > 0 ? totalWallTime / measurements : -1; + } + + /** + * Return true if the timer is running. + * + * @return true if running + */ + public boolean isRunning() { + return isRunning; + } + + /** + * Start the timer. + */ + public synchronized void start() { + if ((todoFlags & RECORD_CPUTIME) != 0) { + currentStartCpuTime = getThreadCpuTime(threadId); + } else { + currentStartCpuTime = -1; + } + if ((todoFlags & RECORD_WALLTIME) != 0) { + currentStartWallTime = System.nanoTime(); + } else { + currentStartWallTime = -1; + } + isRunning = true; + } + + /** + * Stop the timer (if running) and reset all recorded values. + */ + public synchronized void reset() { + currentStartCpuTime = -1; + currentStartWallTime = -1; + totalCpuTime = 0; + totalWallTime = 0; + measurements = 0; + isRunning = false; + threadCount = 0; + } + + /** + * Stop the timer and return the CPU time that has passed since it had last been + * started. The total time (both system and CPU) of all start-stop cycles is + * recorded with the timer. + * + * @return CPU time that the timer was running, or -1 if timer not running or + * CPU time unavailable for other reasons + */ + public synchronized long stop() { + long totalTime = -1; + + if ((todoFlags & RECORD_CPUTIME) != 0 && (currentStartCpuTime != -1)) { + long cpuTime = getThreadCpuTime(threadId); + if (cpuTime != -1) { // may fail if thread already dead + totalTime = cpuTime - currentStartCpuTime; + totalCpuTime += totalTime; + } + } + + if ((todoFlags & RECORD_WALLTIME) != 0 && (currentStartWallTime != -1)) { + long wallTime = System.nanoTime(); + totalWallTime += wallTime - currentStartWallTime; + } + + if (isRunning) { + measurements += 1; + isRunning = false; + } + + currentStartWallTime = -1; + currentStartCpuTime = -1; + + return totalTime; + } + + /** + * Print logging information for the timer. The log only shows the recorded time + * of the completed start-stop cycles. 
If the timer is still running, then it + * will not be stopped to add the currently measured time to the output but a + * warning will be logged. + * + */ + public void log() { + if (LOGGER.isInfoEnabled()) { + String timerLabel; + if (threadId != 0) { + timerLabel = name + " (thread " + threadId + ")"; + } else if (threadCount > 1) { + timerLabel = name + " (over " + threadCount + " threads)"; + } else { + timerLabel = name; + } + + if (todoFlags == RECORD_NONE) { + LOGGER.info("Timer " + timerLabel + " recorded " + measurements + " run(s), no times taken"); + } else { + String labels = ""; + String values = ""; + String separator; + + if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { + labels += "CPU"; + values += totalCpuTime / 1000000; + separator = "/"; + } else { + separator = ""; + } + if ((todoFlags & RECORD_WALLTIME) != 0) { + labels += separator + "Wall"; + values += separator + totalWallTime / 1000000; + } + if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { + labels += "/CPU avg"; + values += "/" + (float) (totalCpuTime) / measurements / 1000000; + } + if ((todoFlags & RECORD_WALLTIME) != 0) { + labels += "/Wall avg"; + values += "/" + (float) (totalWallTime) / measurements / 1000000; + } + if (threadCount > 1) { + if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { + labels += "/CPU per thread"; + values += "/" + (float) (totalCpuTime) / threadCount / 1000000; + } + if ((todoFlags & RECORD_WALLTIME) != 0) { + labels += "/Wall per thread"; + values += "/" + (float) (totalWallTime) / threadCount / 1000000; + } + } + + LOGGER.info( + "Time for " + timerLabel + " for " + measurements + " run(s) " + labels + " (ms): " + values); + } + + if (isRunning) { + LOGGER.warn("Timer " + timerLabel + " logged while it was still running"); + } + } + } + + /** + * Start a timer of the given string name for all todos and the current thread. + * If no such timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + */ + public static void startNamedTimer(String timerName) { + getNamedTimer(timerName).start(); + } + + /** + * Start a timer of the given string name for the current thread. If no such + * timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + */ + public static void startNamedTimer(String timerName, int todoFlags) { + getNamedTimer(timerName, todoFlags).start(); + } + + /** + * Start a timer of the given string name for the current thread. If no such + * timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + * @param threadId of the thread to track, or 0 if only system clock should be + * tracked + */ + public static void startNamedTimer(String timerName, int todoFlags, long threadId) { + getNamedTimer(timerName, todoFlags, threadId).start(); + } + + /** + * Stop a timer of the given string name for all todos and the current thread. + * If no such timer exists, -1 will be returned. Otherwise the return value is + * the CPU time that was measured. + * + * @param timerName the name of the timer + * @return CPU time if timer existed and was running, and -1 otherwise + */ + public static long stopNamedTimer(String timerName) { + return stopNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); + } + + /** + * Stop a timer of the given string name for the current thread. If no such + * timer exists, -1 will be returned. Otherwise the return value is the CPU time + * that was measured. 
+ * + * @param timerName the name of the timer + * @param todoFlags + * @return CPU time if timer existed and was running, and -1 otherwise + */ + public static long stopNamedTimer(String timerName, int todoFlags) { + return stopNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); + } + + /** + * Stop a timer of the given string name for the given thread. If no such timer + * exists, -1 will be returned. Otherwise the return value is the CPU time that + * was measured. + * + * @param timerName the name of the timer + * @param todoFlags + * @param threadId of the thread to track, or 0 if only system clock should be + * tracked + * @return CPU time if timer existed and was running, and -1 otherwise + */ + public static long stopNamedTimer(String timerName, int todoFlags, long threadId) { + Timer key = new Timer(timerName, todoFlags, threadId); + if (registeredTimers.containsKey(key)) { + return registeredTimers.get(key).stop(); + } else { + return -1; + } + } + + /** + * Reset a timer of the given string name for all todos and the current thread. + * If no such timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + */ + public static void resetNamedTimer(String timerName) { + getNamedTimer(timerName).reset(); + } + + /** + * Reset a timer of the given string name for the current thread. If no such + * timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + */ + public static void resetNamedTimer(String timerName, int todoFlags) { + getNamedTimer(timerName, todoFlags).reset(); + } + + /** + * Reset a timer of the given string name for the given thread. If no such timer + * exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + * @param threadId of the thread to track, or 0 if only system clock should be + * tracked + */ + public static void resetNamedTimer(String timerName, int todoFlags, long threadId) { + getNamedTimer(timerName, todoFlags, threadId).reset(); + } + + /** + * Get a timer of the given string name that takes all possible times (todos) + * for the current thread. If no such timer exists yet, then it will be newly + * created. + * + * @param timerName the name of the timer + * @return timer + */ + public static Timer getNamedTimer(String timerName) { + return getNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); + } + + /** + * Returns all registered timers + * + * @return an iterable collection of named timers + */ + public static Iterable getNamedTimers() { + return registeredTimers.keySet(); + } + + /** + * Get a timer of the given string name and todos for the current thread. If no + * such timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + * @return timer + */ + public static Timer getNamedTimer(String timerName, int todoFlags) { + return getNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); + } + + /** + * Get a timer of the given string name for the given thread. If no such timer + * exists yet, then it will be newly created. 
+ * + * @param timerName the name of the timer + * @param todoFlags + * @param threadId of the thread to track, or 0 if only system clock should be + * tracked + * @return timer + */ + public static Timer getNamedTimer(String timerName, int todoFlags, long threadId) { + Timer key = new Timer(timerName, todoFlags, threadId); + Timer previous = registeredTimers.putIfAbsent(key, key); + if (previous != null) { + return previous; + } + // else + return key; + } + + /** + * Collect the total times measured by all known named timers of the given name. + * + * @param timerName + * @return timer + */ + public static Timer getNamedTotalTimer(String timerName) { + long totalCpuTime = 0; + long totalSystemTime = 0; + int measurements = 0; + int threadCount = 0; + int todoFlags = RECORD_NONE; + Timer previousTimer = null; + for (Map.Entry entry : registeredTimers.entrySet()) { + if (entry.getValue().name.equals(timerName)) { + previousTimer = entry.getValue(); + threadCount += 1; + totalCpuTime += previousTimer.totalCpuTime; + totalSystemTime += previousTimer.totalWallTime; + measurements += previousTimer.measurements; + todoFlags |= previousTimer.todoFlags; + } + } + + if (threadCount == 1) { + return previousTimer; + } else { + Timer result = new Timer(timerName, todoFlags, 0); + result.totalCpuTime = totalCpuTime; + result.totalWallTime = totalSystemTime; + result.measurements = measurements; + result.threadCount = threadCount; + return result; + } + } + + public static void logAllNamedTimers(String timerName) { + for (Map.Entry entry : registeredTimers.entrySet()) { + if (entry.getValue().name.equals(timerName)) { + entry.getValue().log(); + } + } + } + + @Override + public int hashCode() { + // Jenkins hash, see http://www.burtleburtle.net/bob/hash/doobs.html and also + // http://en.wikipedia.org/wiki/Jenkins_hash_function. 
+ int hash = name.hashCode(); + hash += (hash << 10); + hash ^= (hash >> 6); + hash += Long.valueOf(threadId).hashCode(); + hash += (hash << 10); + hash ^= (hash >> 6); + hash += Integer.valueOf(todoFlags).hashCode(); + hash += (hash << 10); + hash ^= (hash >> 6); + + hash += (hash << 3); + hash ^= (hash >> 11); + hash += (hash << 15); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } else if (obj == null) { + return false; + } else if (getClass() != obj.getClass()) { + return false; + } else if (threadId == ((Timer) obj).threadId && todoFlags == ((Timer) obj).todoFlags + && name.equals(((Timer) obj).name)) { + return true; + } else { + return false; + } + } + + protected static long getThreadCpuTime(long threadId) { + if (threadId == 0) { // generally invalid + return 0; + } else { + return tmxb.getThreadCpuTime(threadId); + } + } + +} From 5a7cd8972ef5d57ed749c6a3d6bc07334f69b256 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 23:48:19 +0200 Subject: [PATCH 0644/1003] License header --- .../rulewerk/core/model/api/Command.java | 20 +++++++++++++++++++ .../rulewerk/parser/CommandParserTest.java | 20 +++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java index 855652158..1634ae277 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.core.model.api; +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.util.List; import org.semanticweb.rulewerk.core.model.implementation.Serializer; diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java index 93c527d1d..f81d83088 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.parser; +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.*; import org.junit.Test; From d07e95c1838ed5b171af7f80e77e422c3fc66095 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 23:49:36 +0200 Subject: [PATCH 0645/1003] New module for interpreting commands --- pom.xml | 13 +- rulewerk-commands/LICENSE.txt | 201 ++++++++++++++++++ rulewerk-commands/pom.xml | 31 +++ .../commands/AssertCommandInterpreter.java | 66 ++++++ .../commands/CommandExecutionException.java | 44 ++++ .../rulewerk/commands/CommandInterpreter.java | 58 +++++ .../commands/HelpCommandInterpreter.java | 59 +++++ .../rulewerk/commands/Interpreter.java | 84 ++++++++ .../commands/LoadCommandInterpreter.java | 61 ++++++ .../commands/QueryCommandInterpreter.java | 87 ++++++++ .../commands/ReasonCommandInterpreter.java | 59 +++++ rulewerk-examples/pom.xml | 25 ++- 12 files changed, 771 insertions(+), 17 deletions(-) create mode 100644 rulewerk-commands/LICENSE.txt create mode 100644 rulewerk-commands/pom.xml create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java diff --git a/pom.xml b/pom.xml index 47efeb4a6..17fe39ba5 100644 --- a/pom.xml +++ b/pom.xml @@ -1,7 +1,5 @@ - + 4.0.0 @@ -23,10 +21,11 @@ rulewerk-owlapi rulewerk-graal rulewerk-parser + rulewerk-commands rulewerk-examples rulewerk-client coverage - + @@ -130,7 +129,7 @@ org.codehaus.mojo license-maven-plugin 1.14 - + first @@ -173,7 +172,7 @@ - + @@ -188,7 +187,7 @@ - + diff --git a/rulewerk-commands/LICENSE.txt b/rulewerk-commands/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-commands/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml new file mode 100644 index 000000000..9e4eb1374 --- /dev/null +++ b/rulewerk-commands/pom.xml @@ -0,0 +1,31 @@ + + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.7.0-SNAPSHOT + + + rulewerk-commands + jar + + Rulewerk command execution support + API for interpreting shell commands to control Rulewerk + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java new file mode 100644 index 000000000..0a4d0075e --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -0,0 +1,66 @@ +package org.semanticweb.rulewerk.commands; + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class AssertCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) + throws CommandExecutionException { + + for (Argument argument : command.getArguments()) { + if (argument.fromPositiveLiteral().isPresent()) { + PositiveLiteral literal = argument.fromPositiveLiteral().get(); + Fact fact; + try { + fact = Expressions.makeFact(literal.getPredicate(), literal.getArguments()); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); + } + interpreter.getReasoner().getKnowledgeBase().addStatement(fact); + } else if (argument.fromRule().isPresent()) { + interpreter.getReasoner().getKnowledgeBase().addStatement(argument.fromRule().get()); + } else { + throw new CommandExecutionException("Only facts and rules can be asserted."); + } + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " ()+ .\n" + + " fact or rule: statement(s) to be added to the knowledge base\n" + + "Reasoning needs to be invoked after finishing addition of statements."; + } + + @Override + public String getSynopsis() { + return "add facts and rules to the knowledge base"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java new file mode 100644 index 000000000..9b9a5c6b0 --- /dev/null +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java @@ -0,0 +1,44 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; + +public class CommandExecutionException extends RulewerkException { + + /** + * Generated serial version UID + */ + private static final long serialVersionUID = 1479091500621334935L; + + public CommandExecutionException(Throwable cause) { + super(cause); + } + + public CommandExecutionException(String message, Throwable cause) { + super(message, cause); + } + + public CommandExecutionException(String message) { + super(message); + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java new file mode 100644 index 000000000..7959a3376 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java @@ -0,0 +1,58 @@ +package org.semanticweb.rulewerk.commands; + +import org.semanticweb.rulewerk.core.model.api.Command; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Interface for classes that interpret (execute) specific commands. + * + * @author Markus Kroetzsch + * + */ +public interface CommandInterpreter { + + /** + * Execute the commands in the context of the given reasoner and output stream. + * + * @param command command to be interpreted + * @param interpreter surrounding interpreter that provides the execution + * context + */ + void run(Command command, Interpreter interpreter) throws CommandExecutionException; + + /** + * Return a text that describes command use and parameters, using the given + * command name. The output should start with a "Usage:" line, followed by + * single-space-indented parameter descriptions. + * + * @return help message + */ + String getHelp(String commandName); + + /** + * Returns a short line describing the purpose of the command. 
+ * + * @return short command synopsis + */ + String getSynopsis(); + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java new file mode 100644 index 000000000..a94b0fa30 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -0,0 +1,59 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.TermType; + +public class HelpCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + if (command.getArguments().size() == 0) { + interpreter.getOut().println("Available commands:"); + for (String commandName : interpreter.commandInterpreters.keySet()) { + interpreter.getOut().println( + " @" + commandName + ": " + interpreter.commandInterpreters.get(commandName).getSynopsis()); + } + } else if (command.getArguments().size() == 1 && command.getArguments().get(0).fromTerm().isPresent() + && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { + String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); + if (interpreter.commandInterpreters.containsKey(helpCommand)) { + interpreter.getOut().println(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand)); + } else { + interpreter.getOut().println("Command '" + helpCommand + "' not known."); + } + } else { + interpreter.getOut().println(getHelp(command.getName())); + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " [command name] .\n" + "\t command name: command to get detailed help for"; + } + + @Override + public String getSynopsis() { + return "print help on available commands"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java new file mode 100644 index 000000000..959a0d6d6 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -0,0 +1,84 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.PrintStream; +import java.util.HashMap; +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.slf4j.Logger; + +public class Interpreter { + + final Reasoner reasoner; + final PrintStream out; + final Logger logger; + + final HashMap commandInterpreters = new HashMap<>(); + + public Interpreter(Reasoner reasoner, PrintStream out, Logger logger) { + this.reasoner = reasoner; + this.out = out; + this.logger = logger; + registerDefaultCommandInterpreters(); + } + + public void registerCommandInterpreter(String command, CommandInterpreter commandInterpreter) { + commandInterpreters.put(command, commandInterpreter); + } + + public void runCommands(List commands) throws CommandExecutionException { + for (Command command : commands) { + runCommand(command); + } + } + + public void runCommand(Command command) throws CommandExecutionException { + if (commandInterpreters.containsKey(command.getName())) { + try { + commandInterpreters.get(command.getName()).run(command, this); + } catch (Exception e) { + throw new CommandExecutionException(e.getMessage(), e); + } + } else { + throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); + } + } + + public Reasoner getReasoner() { + return reasoner; + } + + public PrintStream getOut() { + return out; + } + + private void registerDefaultCommandInterpreters() { + registerCommandInterpreter("help", new HelpCommandInterpreter()); + registerCommandInterpreter("assert", new AssertCommandInterpreter()); + registerCommandInterpreter("query", new QueryCommandInterpreter()); + registerCommandInterpreter("reason", new ReasonCommandInterpreter()); + registerCommandInterpreter("load", new LoadCommandInterpreter()); + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java new file mode 100644 index 000000000..b52d078de --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -0,0 +1,61 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.FileInputStream; +import java.io.FileNotFoundException; + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +public class LoadCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + if (command.getArguments().size() == 1 && command.getArguments().get(0).fromString().isPresent()) { + String fileName = command.getArguments().get(0).fromString().get(); + try { + FileInputStream fileInputStream = new FileInputStream(fileName); + RuleParser.parseInto(interpreter.getReasoner().getKnowledgeBase(), fileInputStream); + } catch (FileNotFoundException e) { + throw new CommandExecutionException(e.getMessage(), e); + } catch (ParsingException e) { + interpreter.getOut().println("Error parsing file: " + e.getMessage()); + } + + } else { + throw new CommandExecutionException(getHelp(command.getName())); + } + + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " \n" + " file: path to a Rulewerk rls file"; + } + + @Override + public String getSynopsis() { + return "load a knowledge base from file (in Rulewerk rls format)"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java new file mode 100644 index 000000000..bc8f69056 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -0,0 +1,87 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Timer; + +public class QueryCommandInterpreter implements CommandInterpreter { + + public static Term KEYWORD_LIMIT = Expressions.makeAbstractConstant("LIMIT"); + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + + List arguments = command.getArguments(); + PositiveLiteral literal; + + if (arguments.size() > 0 && arguments.get(0).fromPositiveLiteral().isPresent()) { + literal = arguments.get(0).fromPositiveLiteral().get(); + } else { + throw new CommandExecutionException("First argument must be a query literal."); + } + + int limit = -1; + if (arguments.size() == 3 && KEYWORD_LIMIT.equals(arguments.get(1).fromTerm().orElse(null)) + && arguments.get(2).fromTerm().isPresent()) { + try { + limit = Terms.extractInt(arguments.get(2).fromTerm().get()); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Invalid limit given: " + arguments.get(3).fromTerm().get()); + } + } else if (arguments.size() != 1) { + throw new CommandExecutionException("Unrecognized arguments"); + } + + Timer timer = new Timer("query"); + timer.start(); + try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(literal, true)) { + int count = 0; + while (count != limit && answers.hasNext()) { + interpreter.getOut().println(" " + answers.next()); + count++; + } + timer.stop(); + interpreter.getOut().println(count + " result(s) in " + timer.getTotalCpuTime() / 1000000 + + "ms. Results are " + answers.getCorrectness() + "."); + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " [LIMIT ] .\n" + + " query literal: positive literal; may use ?queryVariables and ?existentialVariables\n" + + " limit: maximal number of results to be shown"; + } + + @Override + public String getSynopsis() { + return "print results to queries"; + } +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java new file mode 100644 index 000000000..13753e8f4 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -0,0 +1,59 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Timer; + +public class ReasonCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + + if (command.getArguments().size() > 0) { + throw new CommandExecutionException("This command supports no arguments."); + } + + Timer timer = new Timer("reasoning"); + timer.start(); + try { + interpreter.getReasoner().reason(); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + timer.stop(); + interpreter.getOut() + .println("Loading and materialization finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " ."; + } + + @Override + public String getSynopsis() { + return "load data and compute conclusions from knowledge base"; + } + +} diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index d4abb343b..975a3b56d 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -37,16 +37,21 @@ rulewerk-graal ${project.version} - - ${project.groupId} - rulewerk-parser - ${project.version} - - - ${project.groupId} - rulewerk-vlog - ${project.version} - + + ${project.groupId} + rulewerk-parser + ${project.version} + + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + ${project.groupId} + rulewerk-commands + ${project.version} + org.slf4j slf4j-log4j12 From 51ae716b8baa7676b3e1881825a6e2d27239a87a Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 13 Aug 2020 17:52:23 +0200 Subject: [PATCH 0646/1003] fix bug #182 query answering correctness when vLog does not know predicate. 
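
With this change, the correctness reported by answerQuery(), countQueryAnswers()
and exportQueryAnswersToCsv() for a predicate that VLog does not know depends on
the reasoner state: the empty answer is SOUND_AND_COMPLETE only if the knowledge
base has been fully materialised, and SOUND_BUT_INCOMPLETE otherwise.

A minimal sketch of the affected scenario, following the updated
QueryAnsweringCorrectnessTest below (the Expressions.makeFact(String, Term...)
factory overload is assumed; all other calls appear in the tests):

    import java.io.IOException;

    import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
    import org.semanticweb.rulewerk.core.model.implementation.Expressions;
    import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
    import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
    import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;

    public class UnknownPredicateQueryExample {
        public static void main(String[] args) throws IOException {
            KnowledgeBase kb = new KnowledgeBase();
            try (VLogReasoner reasoner = new VLogReasoner(kb)) {
                reasoner.reason(); // materialise the (still empty) knowledge base
                // knowledge base changes after materialisation; predicate p is never loaded into VLog
                kb.addStatements(Expressions.makeFact("p", Expressions.makeAbstractConstant("c")));
                PositiveLiteral query = Expressions.makePositiveLiteral("p",
                        Expressions.makeUniversalVariable("x"));
                try (QueryResultIterator answers = reasoner.answerQuery(query, true)) {
                    // no results, but the empty answer is merely sound in this state:
                    // prints SOUND_BUT_INCOMPLETE (previously SOUND_AND_COMPLETE was claimed)
                    System.out.println(answers.getCorrectness());
                }
            }
        }
    }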
--- .../rulewerk/reasoner/vlog/VLogReasoner.java | 179 ++++++++++-------- .../reasoner/vlog/AddDataSourceTest.java | 22 +-- .../vlog/QueryAnsweringCorrectnessTest.java | 95 +++++++++- 3 files changed, 207 insertions(+), 89 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 8f17d699f..3340b0bd3 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -100,7 +100,7 @@ public VLogReasoner(KnowledgeBase knowledgeBase) { this.knowledgeBase = knowledgeBase; this.knowledgeBase.addListener(this); - setLogLevel(this.internalLogLevel); + this.setLogLevel(this.internalLogLevel); } @Override @@ -111,7 +111,7 @@ public KnowledgeBase getKnowledgeBase() { @Override public void setAlgorithm(final Algorithm algorithm) { Validate.notNull(algorithm, "Algorithm cannot be null!"); - validateNotClosed(); + this.validateNotClosed(); this.algorithm = algorithm; } @@ -122,7 +122,7 @@ public Algorithm getAlgorithm() { @Override public void setReasoningTimeout(Integer seconds) { - validateNotClosed(); + this.validateNotClosed(); if (seconds != null) { Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); } @@ -136,7 +136,7 @@ public Integer getReasoningTimeout() { @Override public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { - validateNotClosed(); + this.validateNotClosed(); Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); this.ruleRewriteStrategy = ruleRewritingStrategy; } @@ -145,29 +145,33 @@ public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { public RuleRewriteStrategy getRuleRewriteStrategy() { return this.ruleRewriteStrategy; } + + @Override + public Correctness getCorrectness() { + return this.correctness; + } /* * TODO Due to automatic predicate renaming, it can happen that an EDB predicate * cannot be queried after loading unless reasoning has already been invoked * (since the auxiliary rule that imports the EDB facts to the "real" predicate * must be used). This issue could be weakened by rewriting queries to - * (single-source) EDB predicates internally when in such a state, + * (single-source) EDB predicates internally when in such a state. */ - // @Override void load() throws IOException { - validateNotClosed(); + this.validateNotClosed(); switch (this.reasonerState) { case KB_NOT_LOADED: - loadKnowledgeBase(); + this.loadKnowledgeBase(); break; case KB_LOADED: case MATERIALISED: // do nothing, all KB is already loaded break; case KB_CHANGED: - resetReasoner(); - loadKnowledgeBase(); + this.resetReasoner(); + this.loadKnowledgeBase(); default: break; } @@ -183,16 +187,16 @@ void loadKnowledgeBase() throws IOException { } // 1. vLog is initialized by loading VLog data sources - loadVLogDataSources(vLogKB); + this.loadVLogDataSources(vLogKB); // 2. in-memory data is loaded - loadInMemoryDataSources(vLogKB); - validateDataSourcePredicateArities(vLogKB); + this.loadInMemoryDataSources(vLogKB); + this.validateDataSourcePredicateArities(vLogKB); - loadFacts(vLogKB); + this.loadFacts(vLogKB); // 3. 
rules are loaded - loadRules(vLogKB); + this.loadRules(vLogKB); this.reasonerState = ReasonerState.KB_LOADED; @@ -213,9 +217,9 @@ void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { } void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { - vLogKB.getEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(v.getDataSource(), k)); + vLogKB.getEdbPredicates().forEach((k, v) -> this.loadInMemoryDataSource(v.getDataSource(), k)); - vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(k.getDataSource(), v)); + vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> this.loadInMemoryDataSource(k.getDataSource(), v)); } void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { @@ -223,7 +227,7 @@ void loadInMemoryDataSource(final DataSource dataSource, final Predicate predica final VLogInMemoryDataSource inMemoryDataSource = (VLogInMemoryDataSource) dataSource; try { - load(predicate, inMemoryDataSource); + this.load(predicate, inMemoryDataSource); } catch (final EDBConfigurationException e) { throw new RulewerkRuntimeException("Invalid data sources configuration!", e); } @@ -252,9 +256,10 @@ void load(final Predicate predicate, final VLogInMemoryDataSource inMemoryDataSo */ void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws IncompatiblePredicateArityException { - vLogKB.getEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(k, v.getDataSource())); + vLogKB.getEdbPredicates().forEach((k, v) -> this.validateDataSourcePredicateArity(k, v.getDataSource())); - vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(v, k.getDataSource())); + vLogKB.getAliasesForEdbPredicates() + .forEach((k, v) -> this.validateDataSourcePredicateArity(v, k.getDataSource())); } /** @@ -325,23 +330,23 @@ void loadRules(final VLogKnowledgeBase vLogKB) { @Override public boolean reason() throws IOException { - validateNotClosed(); + this.validateNotClosed(); switch (this.reasonerState) { case KB_NOT_LOADED: - load(); - runChase(); + this.load(); + this.runChase(); break; case KB_LOADED: - runChase(); + this.runChase(); break; case KB_CHANGED: - resetReasoner(); - load(); - runChase(); + this.resetReasoner(); + this.load(); + this.runChase(); break; case MATERIALISED: - runChase(); + this.runChase(); break; default: break; @@ -383,29 +388,49 @@ private void runChase() { @Override public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls) { - validateBeforeQuerying(query); + this.validateBeforeQuerying(query); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - karmaresearch.vlog.QueryResultIterator queryResultIterator; + final karmaresearch.vlog.QueryResultIterator queryResultIterator; try { final int predicateId = this.vLog.getPredicateId(vLogAtom.getPredicate()); - final long[] terms = extractTerms(vLogAtom.getTerms()); + final long[] terms = this.extractTerms(vLogAtom.getTerms()); queryResultIterator = this.vLog.query(predicateId, terms, true, filterBlanks); } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. 
Answer must be empty!"); - return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); + return this.createEmptyResultIterator(query); } - logWarningOnCorrectness(); + this.logWarningOnCorrectness(this.correctness); return new VLogFastQueryResultIterator(queryResultIterator, this.correctness, this.vLog); } + private QueryResultIterator createEmptyResultIterator(final PositiveLiteral query) { + final Correctness answerCorrectness = this.getCorrectnessUnknownPredicate(query); + this.logWarningOnCorrectness(answerCorrectness); + return new EmptyQueryResultIterator(answerCorrectness); + } + + private Correctness getCorrectnessUnknownPredicate(final PositiveLiteral query) { + final Correctness answerCorrectness; + if (this.reasonerState == ReasonerState.MATERIALISED) { + this.warnUnknownPredicate(query); + answerCorrectness = Correctness.SOUND_AND_COMPLETE; + } else { + answerCorrectness = Correctness.SOUND_BUT_INCOMPLETE; + } + return answerCorrectness; + } + + private void warnUnknownPredicate(final PositiveLiteral query) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the materialised knowledge base. Answer must be empty!"); + } + /** * Utility method copied from {@link karmaresearch.vlog.VLog}. * @@ -441,7 +466,7 @@ private long[] extractTerms(karmaresearch.vlog.Term[] terms) throws NotStartedEx @Override public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls) { - validateBeforeQuerying(query); + this.validateBeforeQuerying(query); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); @@ -452,18 +477,22 @@ public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean include } catch (NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (NonExistingPredicateException e) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty!"); - result = 0; + return this.createEmptyResultCount(query); } - logWarningOnCorrectness(); + this.logWarningOnCorrectness(this.correctness); return new QueryAnswerCountImpl(this.correctness, result); } + private QueryAnswerCount createEmptyResultCount(final PositiveLiteral query) { + final Correctness correctness = this.getCorrectnessUnknownPredicate(query); + this.logWarningOnCorrectness(correctness); + return new QueryAnswerCountImpl(correctness, 0); + } + @Override public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { - validateBeforeQuerying(query); + this.validateBeforeQuerying(query); Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); @@ -475,33 +504,33 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. 
Answers are therefore empty."); + final Correctness correctness = this.getCorrectnessUnknownPredicate(query); + this.logWarningOnCorrectness(correctness); + return correctness; } - - logWarningOnCorrectness(); + this.logWarningOnCorrectness(this.correctness); return this.correctness; } private void validateBeforeQuerying(final PositiveLiteral query) { - validateNotClosed(); + this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + throw new ReasonerStateException(this.reasonerState, "Querying is not allowed before reasoner is loaded!"); } Validate.notNull(query, "Query atom must not be null!"); } @Override public Correctness forEachInference(InferenceAction action) throws IOException { - validateNotClosed(); + this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Obtaining inferences is not alowed before reasoner is loaded!"); } - final Set toBeQueriedHeadPredicates = getKnowledgeBasePredicates(); + final Set toBeQueriedHeadPredicates = this.getKnowledgeBasePredicates(); for (final Predicate predicate : toBeQueriedHeadPredicates) { - final PositiveLiteral queryAtom = getQueryAtom(predicate); + final PositiveLiteral queryAtom = this.getQueryAtom(predicate); final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); try (final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false)) { while (answers.hasNext()) { @@ -516,23 +545,19 @@ public Correctness forEachInference(InferenceAction action) throws IOException { } } - logWarningOnCorrectness(); - return this.correctness; - } - - public Correctness getCorrectness() { + this.logWarningOnCorrectness(this.correctness); return this.correctness; } - private void logWarningOnCorrectness() { - if (this.correctness != Correctness.SOUND_AND_COMPLETE) { + private void logWarningOnCorrectness(final Correctness correctness) { + if (correctness != Correctness.SOUND_AND_COMPLETE) { LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); } } @Override public void resetReasoner() { - validateNotClosed(); + this.validateNotClosed(); this.reasonerState = ReasonerState.KB_NOT_LOADED; this.vLog.stop(); LOGGER.info("Reasoner has been reset. 
All inferences computed during reasoning have been discarded."); @@ -552,7 +577,7 @@ public void close() { @Override public void setLogLevel(LogLevel logLevel) { - validateNotClosed(); + this.validateNotClosed(); Validate.notNull(logLevel, "Log level cannot be null!"); this.internalLogLevel = logLevel; this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); @@ -565,33 +590,33 @@ public LogLevel getLogLevel() { @Override public void setLogFile(String filePath) { - validateNotClosed(); + this.validateNotClosed(); this.vLog.setLogFile(filePath); } @Override public boolean isJA() { - return checkAcyclicity(AcyclicityNotion.JA); + return this.checkAcyclicity(AcyclicityNotion.JA); } @Override public boolean isRJA() { - return checkAcyclicity(AcyclicityNotion.RJA); + return this.checkAcyclicity(AcyclicityNotion.RJA); } @Override public boolean isMFA() { - return checkAcyclicity(AcyclicityNotion.MFA); + return this.checkAcyclicity(AcyclicityNotion.MFA); } @Override public boolean isRMFA() { - return checkAcyclicity(AcyclicityNotion.RMFA); + return this.checkAcyclicity(AcyclicityNotion.RMFA); } @Override public boolean isMFC() { - validateNotClosed(); + this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Checking rules acyclicity is not allowed before loading!"); @@ -608,11 +633,11 @@ public boolean isMFC() { @Override public CyclicityResult checkForCycles() { - final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); + final boolean acyclic = this.isJA() || this.isRJA() || this.isMFA() || this.isRMFA(); if (acyclic) { return CyclicityResult.ACYCLIC; } else { - final boolean cyclic = isMFC(); + final boolean cyclic = this.isMFC(); if (cyclic) { return CyclicityResult.CYCLIC; } @@ -624,32 +649,32 @@ public CyclicityResult checkForCycles() { public void onStatementsAdded(List statementsAdded) { // TODO more elaborate materialisation state handling - updateReasonerToKnowledgeBaseChanged(); + this.updateReasonerToKnowledgeBaseChanged(); // updateCorrectnessOnStatementsAdded(statementsAdded); - updateCorrectnessOnStatementsAdded(); + this.updateCorrectnessOnStatementsAdded(); } @Override public void onStatementAdded(Statement statementAdded) { // TODO more elaborate materialisation state handling - updateReasonerToKnowledgeBaseChanged(); + this.updateReasonerToKnowledgeBaseChanged(); // updateCorrectnessOnStatementAdded(statementAdded); - updateCorrectnessOnStatementsAdded(); + this.updateCorrectnessOnStatementsAdded(); } @Override public void onStatementRemoved(Statement statementRemoved) { - updateReasonerToKnowledgeBaseChanged(); - updateCorrectnessOnStatementsRemoved(); + this.updateReasonerToKnowledgeBaseChanged(); + this.updateCorrectnessOnStatementsRemoved(); } @Override public void onStatementsRemoved(List statementsRemoved) { - updateReasonerToKnowledgeBaseChanged(); - updateCorrectnessOnStatementsRemoved(); + this.updateReasonerToKnowledgeBaseChanged(); + this.updateCorrectnessOnStatementsRemoved(); } Set getKnowledgeBasePredicates() { @@ -677,10 +702,10 @@ private PositiveLiteral getQueryAtom(final Predicate predicate) { } private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { - validateNotClosed(); + this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { try { - load(); + this.load(); } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/rulewerk/issues/128 throw new 
RulewerkRuntimeException(e); } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java index ed1aa4f23..5b329f9ba 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java @@ -77,12 +77,12 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep reasoner.reason(); try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeUniversalVariable("x")), false)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateParity1, Expressions.makeUniversalVariable("x")), false)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } @@ -103,12 +103,12 @@ public void testAddDataSourceBeforeLoading() throws IOException { reasoner.load(); try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } @@ -133,7 +133,7 @@ public void testAddDataSourceAfterLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } @@ -141,7 +141,7 @@ public void testAddDataSourceAfterLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, queryResult.getCorrectness()); } } } @@ -164,14 +164,14 @@ public void testAddDataSourceAfterReasoning() throws IOException { try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateP, 
Expressions.makeUniversalVariable("x")), true)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } // there is no fact for predicate Q loaded in the reasoner try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, queryResult.getCorrectness()); } } } @@ -233,8 +233,8 @@ public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOExce reasoner.reason(); try (QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { - final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); - expectedAnswers.addAll(csvFile_c_d_Content); + final Set> expectedAnswers = new HashSet<>(this.csvFile_c1_c2_Content); + expectedAnswers.addAll(this.csvFile_c_d_Content); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); @@ -258,7 +258,7 @@ public void testAddDataSourceAndFactsForPredicateAfterReasoning() throws IOExcep reasoner.reason(); try (QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { - final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); + final Set> expectedAnswers = new HashSet<>(this.csvFile_c1_c2_Content); expectedAnswers.add(Arrays.asList(Expressions.makeAbstractConstant("a"))); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java index 583b34229..9f66c9b0a 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java @@ -45,6 +45,7 @@ import org.semanticweb.rulewerk.core.reasoner.Algorithm; import org.semanticweb.rulewerk.core.reasoner.Correctness; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; @@ -94,7 +95,7 @@ public void testCorrectnessKBChanges() throws IOException { // there are no facts for Q-1 predicate try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { assertFalse(resultIterator.hasNext()); - assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); } reasoner.reason(); @@ -411,6 +412,98 @@ public void testCorrectnessNoKBChanges() throws IOException { } } + @Test + public void answerQuery_PredicateNotLoaded_Materialized() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new 
VLogReasoner(kb)) { + kb.addStatements(factPc); + reasoner.reason(); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertFalse(resultIterator.hasNext()); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void answerQuery_PredicateNotLoaded_KbChanged() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.addStatements(factPc); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + assertFalse(resultIterator.hasNext()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void countQueryAnswers_PredicateNotLoaded_Materialized() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.addStatements(factPc); + reasoner.reason(); + + final QueryAnswerCount resultIterator = reasoner.countQueryAnswers(ruleHeadQx); + assertEquals(0, resultIterator.getCount()); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + + @Test + public void countQueryAnswers_PredicateNotLoaded_KbChanged() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.addStatements(factPc); + + final QueryAnswerCount resultIterator = reasoner.countQueryAnswers(ruleBodyPx); + assertEquals(0, resultIterator.getCount()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); + } + + } + + @Test + public void exportQueryAnswersToCsv_PredicateNotLoaded_Materialized() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.addStatements(factPc); + reasoner.reason(); + + // TODO mock file or something + String csvFilePath = ".csv"; + Correctness correctness = reasoner.exportQueryAnswersToCsv(ruleHeadQx, csvFilePath, true); + + assertEquals(Correctness.SOUND_AND_COMPLETE, correctness); + } + + } + + @Test + public void exportQueryAnswersToCsv_PredicateNotLoaded_KbChanged() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.addStatements(factPc); + + // TODO mock file or something + String csvFilePath = ".csv"; + Correctness correctness = reasoner.exportQueryAnswersToCsv(ruleBodyPx, csvFilePath, true); + + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, correctness); + } + } + @Test public void testMaterialisationIncomplete() throws IOException { final Variable y = Expressions.makeUniversalVariable("y"); From 4ed5e084fb368ce329d6a07e5451120fe4779f7c Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 13 Aug 2020 17:54:56 +0200 Subject: [PATCH 0647/1003] make ReasonerStateException message for querying before reason() more explicit --- .../org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 3340b0bd3..f49bfef7a 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -515,7 
+515,7 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St private void validateBeforeQuerying(final PositiveLiteral query) { this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not allowed before reasoner is loaded!"); + throw new ReasonerStateException(this.reasonerState, "Querying is not allowed before Reasoner#reason() was first called!"); } Validate.notNull(query, "Query atom must not be null!"); } From 1feedb724c7eb97edb818d66cb338518cc75ea13 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 18 Aug 2020 17:14:23 +0200 Subject: [PATCH 0648/1003] Remove support for String and Iri from Argument --- .../commands/LoadCommandInterpreter.java | 12 +- .../rulewerk/core/model/api/Argument.java | 128 ++---------------- .../rulewerk/core/model/api/Command.java | 2 - .../rulewerk/core/model/ArgumentTest.java | 50 +++---- .../rulewerk/examples/ExamplesUtils.java | 2 +- .../rulewerk/parser/DirectiveHandler.java | 58 ++------ .../rulewerk/parser/javacc/JavaCCParser.jj | 15 +- .../rulewerk/parser/CommandParserTest.java | 15 +- .../rulewerk/parser/DirectiveHandlerTest.java | 69 ++-------- 9 files changed, 87 insertions(+), 264 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index b52d078de..dde5e9d18 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -24,6 +24,7 @@ import java.io.FileNotFoundException; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; @@ -31,8 +32,15 @@ public class LoadCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - if (command.getArguments().size() == 1 && command.getArguments().get(0).fromString().isPresent()) { - String fileName = command.getArguments().get(0).fromString().get(); + if (command.getArguments().size() == 1) { + String fileName; + try { + fileName = Terms.extractString( + command.getArguments().get(0).fromTerm().orElseThrow(() -> new CommandExecutionException( + "Expected string for file name, but did not find a term."))); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Failed to convert term given for file name to string."); + } try { FileInputStream fileInputStream = new FileInputStream(fileName); RuleParser.parseInto(interpreter.getReasoner().getKnowledgeBase(), fileInputStream); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java index 7fe488150..27604edb3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java @@ -20,7 +20,6 @@ * #L% */ -import java.net.URI; import java.util.Optional; import java.util.function.Function; @@ -37,16 +36,13 @@ private Argument() { /** * Apply a function to the contained value. 
* - * @param stringHandler the function to apply to a string argument - * @param iriHandler the function to apply to an IRI * @param termHandler the function to apply to a Term * @param ruleHandler the function to apply to a Rule * @param positiveLiteralHandler the function to apply to a Literal * * @return the value returned by the appropriate handler function */ - public abstract V apply(Function stringHandler, - Function iriHandler, Function termHandler, + public abstract V apply(Function termHandler, Function ruleHandler, Function positiveLiteralHandler); @@ -76,80 +72,6 @@ protected Optional isEqual(Object other) { return Optional.empty(); } - /** - * Create an argument containing a String. - * - * @param value the string value - * - * @return An argument containing the given string value - */ - public static Argument string(String value) { - return new Argument() { - @Override - public V apply(Function stringHandler, - Function iriHandler, Function termHandler, - Function ruleHandler, - Function positiveLiteralHandler) { - return stringHandler.apply(value); - } - - @Override - public boolean equals(Object other) { - Optional maybeEquals = isEqual(other); - - if (maybeEquals.isPresent()) { - return maybeEquals.get(); - } - - Argument otherArgument = (Argument) other; - return otherArgument.apply(str -> str.equals(value), iri -> false, term -> false, rule -> false, - positiveLiteral -> false); - } - - @Override - public int hashCode() { - return 41 * value.hashCode(); - } - }; - } - - /** - * Create an argument containing a IRI. - * - * @param value the IRI value - * - * @return An argument containing the given IRI value - */ - public static Argument iri(URI value) { - return new Argument() { - @Override - public V apply(Function stringHandler, - Function iriHandler, Function termHandler, - Function ruleHandler, - Function positiveLiteralHandler) { - return iriHandler.apply(value); - } - - @Override - public boolean equals(Object other) { - Optional maybeEquals = isEqual(other); - - if (maybeEquals.isPresent()) { - return maybeEquals.get(); - } - - Argument otherArgument = (Argument) other; - return otherArgument.apply(str -> false, iri -> iri.equals(value), term -> false, rule -> false, - positiveLiteral -> false); - } - - @Override - public int hashCode() { - return 43 * value.hashCode(); - } - }; - } - /** * Create an argument containing a Term. 
* @@ -160,8 +82,7 @@ public int hashCode() { public static Argument term(Term value) { return new Argument() { @Override - public V apply(Function stringHandler, - Function iriHandler, Function termHandler, + public V apply(Function termHandler, Function ruleHandler, Function positiveLiteralHandler) { return termHandler.apply(value); @@ -176,8 +97,7 @@ public boolean equals(Object other) { } Argument otherArgument = (Argument) other; - return otherArgument.apply(str -> false, iri -> false, term -> term.equals(value), rule -> false, - positiveLiteral -> false); + return otherArgument.apply(term -> term.equals(value), rule -> false, positiveLiteral -> false); } @Override @@ -197,8 +117,7 @@ public int hashCode() { public static Argument rule(Rule value) { return new Argument() { @Override - public V apply(Function stringHandler, - Function iriHandler, Function termHandler, + public V apply(Function termHandler, Function ruleHandler, Function positiveLiteralHandler) { return ruleHandler.apply(value); @@ -213,8 +132,7 @@ public boolean equals(Object other) { } Argument otherArgument = (Argument) other; - return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> rule.equals(value), - positiveLiteral -> false); + return otherArgument.apply(term -> false, rule -> rule.equals(value), positiveLiteral -> false); } @Override @@ -234,8 +152,7 @@ public int hashCode() { public static Argument positiveLiteral(PositiveLiteral value) { return new Argument() { @Override - public V apply(Function stringHandler, - Function iriHandler, Function termHandler, + public V apply(Function termHandler, Function ruleHandler, Function positiveLiteralHandler) { return positiveLiteralHandler.apply(value); @@ -250,7 +167,7 @@ public boolean equals(Object other) { } Argument otherArgument = (Argument) other; - return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> false, + return otherArgument.apply(term -> false, rule -> false, positiveLiteral -> positiveLiteral.equals(value)); } @@ -261,28 +178,6 @@ public int hashCode() { }; } - /** - * Create an optional from a (possible) string value. - * - * @return An optional containing the contained string, or an empty Optional if - * the argument doesn't contain a string. - */ - public Optional fromString() { - return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), - value -> Optional.empty()); - } - - /** - * Create an optional from a (possible) IRI value. - * - * @return An optional containing the contained IRI, or an empty Optional if the - * argument doesn't contain a IRI. - */ - public Optional fromIri() { - return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty(), value -> Optional.empty(), - value -> Optional.empty()); - } - /** * Create an optional from a (possible) Term value. * @@ -290,8 +185,7 @@ public Optional fromIri() { * the argument doesn't contain a Term. */ public Optional fromTerm() { - return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of, value -> Optional.empty(), - value -> Optional.empty()); + return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty()); } /** @@ -301,8 +195,7 @@ public Optional fromTerm() { * the argument doesn't contain a Rule. 
*/ public Optional fromRule() { - return this.apply(value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), Optional::of, - value -> Optional.empty()); + return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty()); } /** @@ -312,7 +205,6 @@ public Optional fromRule() { * Optional if the argument doesn't contain a PositiveLitreal. */ public Optional fromPositiveLiteral() { - return this.apply(value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), - value -> Optional.empty(), Optional::of); + return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java index 1634ae277..3af723089 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -76,8 +76,6 @@ public String getSyntacticRepresentation() { .append(Serializer.getString(rule.getBody())); } else if (argument.fromPositiveLiteral().isPresent()) { result.append(argument.fromPositiveLiteral().get().getSyntacticRepresentation()); - } else if (argument.fromString().isPresent()) { - result.append(Serializer.getString(argument.fromString().get())); } else { throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java index bcec475d1..76efe55d4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java @@ -21,65 +21,67 @@ */ import static org.junit.Assert.*; -import java.net.URI; - import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class ArgumentTest { - private static final String STRING = "src/test/resources/facts.rls"; - private static final URI IRI = URI.create("https://example.org"); - private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); + private static final Term TERM = Expressions.makeDatatypeConstant("some string", + PrefixDeclarationRegistry.XSD_STRING); + private static final PositiveLiteral LITERAL = Expressions.makePositiveLiteral("p", TERM); + private static final Rule RULE = Expressions.makeRule(LITERAL, LITERAL); - private static final Argument STRING_ARGUMENT = Argument.string(STRING); - private static final Argument IRI_ARGUMENT = Argument.iri(IRI); private static final Argument TERM_ARGUMENT = Argument.term(TERM); + private static final Argument LITERAL_ARGUMENT = Argument.positiveLiteral(LITERAL); + private static final Argument RULE_ARGUMENT = Argument.rule(RULE); @Test public void equals_null_returnsFalse() { - assertFalse(STRING_ARGUMENT.equals(null)); - assertFalse(IRI_ARGUMENT.equals(null)); + assertFalse(LITERAL_ARGUMENT.equals(null)); + assertFalse(RULE_ARGUMENT.equals(null)); 
assertFalse(TERM_ARGUMENT.equals(null)); } @Test public void equals_self_returnsTrue() { - assertTrue(STRING_ARGUMENT.equals(STRING_ARGUMENT)); - assertTrue(IRI_ARGUMENT.equals(IRI_ARGUMENT)); + assertTrue(RULE_ARGUMENT.equals(RULE_ARGUMENT)); + assertTrue(LITERAL_ARGUMENT.equals(LITERAL_ARGUMENT)); assertTrue(TERM_ARGUMENT.equals(TERM_ARGUMENT)); } @Test public void equals_equal_returnsTrue() { - assertTrue(STRING_ARGUMENT.equals(Argument.string(STRING))); - assertTrue(IRI_ARGUMENT.equals(Argument.iri(IRI))); + assertTrue(RULE_ARGUMENT.equals(Argument.rule(RULE))); + assertTrue(LITERAL_ARGUMENT.equals(Argument.positiveLiteral(LITERAL))); assertTrue(TERM_ARGUMENT.equals(Argument.term(TERM))); } @Test public void equals_notEqualButSameType_returnsFalse() { - assertFalse(STRING_ARGUMENT.equals(Argument.string(STRING + "test"))); - assertFalse(IRI_ARGUMENT.equals(Argument.iri(URI.create("https://example.com")))); + assertFalse(RULE_ARGUMENT.equals(Argument.rule(Expressions.makeRule(LITERAL, LITERAL, LITERAL)))); + assertFalse(LITERAL_ARGUMENT.equals(Argument.positiveLiteral(Expressions.makePositiveLiteral("q", TERM)))); assertFalse(TERM_ARGUMENT - .equals(Argument.term(Expressions.makeDatatypeConstant(STRING, "https://example.com")))); + .equals(Argument.term(Expressions.makeDatatypeConstant("another string", "https://example.com")))); } @Test public void equals_differentType_returnsFalse() { - assertFalse(STRING_ARGUMENT.equals(IRI_ARGUMENT)); - assertFalse(STRING_ARGUMENT.equals(TERM_ARGUMENT)); - assertFalse(IRI_ARGUMENT.equals(STRING_ARGUMENT)); - assertFalse(IRI_ARGUMENT.equals(TERM_ARGUMENT)); - assertFalse(TERM_ARGUMENT.equals(STRING_ARGUMENT)); - assertFalse(TERM_ARGUMENT.equals(IRI_ARGUMENT)); + assertFalse(RULE_ARGUMENT.equals(LITERAL_ARGUMENT)); + assertFalse(RULE_ARGUMENT.equals(TERM_ARGUMENT)); + assertFalse(LITERAL_ARGUMENT.equals(RULE_ARGUMENT)); + assertFalse(LITERAL_ARGUMENT.equals(TERM_ARGUMENT)); + assertFalse(TERM_ARGUMENT.equals(RULE_ARGUMENT)); + assertFalse(TERM_ARGUMENT.equals(LITERAL_ARGUMENT)); } @Test public void equals_String_returnsFalse() { - assertFalse(STRING_ARGUMENT.equals((Object) "test")); - assertFalse(IRI_ARGUMENT.equals((Object) "test")); + assertFalse(RULE_ARGUMENT.equals((Object) "test")); + assertFalse(LITERAL_ARGUMENT.equals((Object) "test")); assertFalse(TERM_ARGUMENT.equals((Object) "test")); } } diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index a0801f415..87745d975 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -73,7 +73,7 @@ public static void configureLogging() { final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; consoleAppender.setLayout(new PatternLayout(pattern)); // Change to Level.ERROR for fewer messages: - consoleAppender.setThreshold(Level.INFO); + consoleAppender.setThreshold(Level.ERROR); consoleAppender.activateOptions(); Logger.getRootLogger().addAppender(consoleAppender); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 1be48eba7..2a8de3aa9 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -21,15 +21,13 @@ */ import java.io.File; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; import java.nio.file.InvalidPathException; import java.util.List; import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; @@ -54,8 +52,7 @@ public interface DirectiveHandler { * directive, or the number of arguments is invalid. * @return a {@code T} instance corresponding to the given arguments. */ - public T handleDirective(List arguments, final SubParserFactory subParserFactory) - throws ParsingException; + public T handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException; /** * Validate the provided number of arguments to the directive statement. @@ -87,8 +84,12 @@ public static void validateNumberOfArguments(final List arguments, fin */ public static String validateStringArgument(final Argument argument, final String description) throws ParsingException { - return argument.fromString() - .orElseThrow(() -> new ParsingException("description \"" + argument + "\" is not a string.")); + try { + return Terms.extractString(argument.fromTerm().orElseThrow( + () -> new ParsingException("Expected string for " + description + ", but did not find a term."))); + } catch (IllegalArgumentException e) { + throw new ParsingException("Failed to convert term given for " + description + " to string."); + } } /** @@ -110,50 +111,12 @@ public static File validateFilenameArgument(final Argument argument, final Strin // we don't care about the actual path, just that there is one. file.toPath(); } catch (InvalidPathException e) { - throw new ParsingException(description + "\"" + argument + "\" is not a valid file path.", e); + throw new ParsingException(description + "\"" + fileName + "\" is not a valid file path.", e); } return file; } - /** - * Validate that the provided argument is an IRI. - * - * @param argument the argument to validate - * @param description a description of the argument, used in constructing the - * error message. - * - * @throws ParsingException when the given argument is not an IRI. - * - * @return the contained IRI. - */ - public static URI validateIriArgument(final Argument argument, final String description) - throws ParsingException { - return argument.fromIri() - .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not an IRI.")); - } - - /** - * Validate that the provided argument is a {@link URL}. - * - * @param argument the argument to validate - * @param description a description of the argument, used in constructing the - * error message. - * - * @throws ParsingException when the given argument is not a valid {@link URL}. - * - * @return the {@link URL} corresponding to the contained IRI. 
- */ - public static URL validateUrlArgument(final Argument argument, final String description) - throws ParsingException { - URI iri = DirectiveHandler.validateIriArgument(argument, description); - try { - return iri.toURL(); - } catch (MalformedURLException e) { - throw new ParsingException(description + "\"" + argument + "\" is not a valid URL.", e); - } - } - /** * Validate that the provided argument is a {@link Term}. * @@ -165,8 +128,7 @@ public static URL validateUrlArgument(final Argument argument, final String desc * * @return the contained {@link Term}. */ - public static Term validateTermArgument(final Argument argument, final String description) - throws ParsingException { + public static Term validateTermArgument(final Argument argument, final String description) throws ParsingException { return argument.fromTerm() .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not a Term.")); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index 59e2c3f85..ef20b419f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -57,11 +57,10 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.script.Argument; -import org.semanticweb.rulewerk.core.script.Command; - import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class JavaCCParser extends JavaCCParserBase @@ -455,16 +454,6 @@ LinkedList< Argument > Arguments() throws PrefixDeclarationException : { } { ( LOOKAHEAD(ruleNoDot()) rule = ruleNoDot() { argument = Argument.rule(rule); } | LOOKAHEAD(positiveLiteral(FormulaContext.HEAD)) positiveLiteral = positiveLiteral(FormulaContext.HEAD) { argument = Argument.positiveLiteral(positiveLiteral); } - | LOOKAHEAD(String()) str = String() { argument = Argument.string(str); } - | LOOKAHEAD(absoluteIri()) str = absoluteIri() { - URI url; - try { - url = new URI(str); - } catch (URISyntaxException e) { - throw makeParseExceptionWithCause("Error parsing IRIhandler: " + e.getMessage(), e); - } - argument = Argument.iri(url); - } | t = term(FormulaContext.HEAD) { argument = Argument.term(t); } ) [rest = Arguments()] { rest.addFirst(argument); diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java index f81d83088..027cff739 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java @@ -23,6 +23,7 @@ import static org.junit.Assert.*; import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; @@ -35,9 +36,19 @@ public void parseCommand() throws ParsingException { assertEquals("query", 
command.getName()); assertEquals(5, command.getArguments().size()); assertTrue(command.getArguments().get(0).fromRule().isPresent()); - assertTrue(command.getArguments().get(1).fromString().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); assertTrue(command.getArguments().get(2).fromTerm().isPresent()); assertTrue(command.getArguments().get(3).fromPositiveLiteral().isPresent()); - assertTrue(command.getArguments().get(4).fromIri().isPresent()); + assertTrue(command.getArguments().get(4).fromTerm().isPresent()); } + +// @Test +// public void parseCommandTest() throws ParsingException { +// String input = "@myprefix wdqs: ."; +//// String input = "@mysource diseaseId[2]: sparql(wdqs:sparql, \"disease,doid\", \"?disease wdt:P699 ?doid .\") ."; +// Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); +// for (Argument argument : command.getArguments()) { +// System.out.println("-"); +// } +// } } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java index 725d54dc1..66f89562a 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -22,87 +22,48 @@ import static org.junit.Assert.*; import java.io.File; -import java.net.MalformedURLException; -import java.net.URI; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class DirectiveHandlerTest { private static final String STRING = "src/test/resources/facts.rls"; - private static final URI IRI = URI.create("https://example.org"); - private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); + private static final Term STRINGTERM = Expressions.makeDatatypeConstant(STRING, + PrefixDeclarationRegistry.XSD_STRING); + private static final Term INTTERM = Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INT); - private static final Argument STRING_ARGUMENT = Argument.string(STRING); - private static final Argument IRI_ARGUMENT = Argument.iri(IRI); - private static final Argument TERM_ARGUMENT = Argument.term(TERM); + private static final Argument TERM_STRING_ARGUMENT = Argument.term(STRINGTERM); + private static final Argument TERM_INT_ARGUMENT = Argument.term(INTTERM); @Test public void validateStringArgument_stringArgument_succeeds() throws ParsingException { - assertEquals(STRING, DirectiveHandler.validateStringArgument(STRING_ARGUMENT, "string argument")); + assertEquals(STRING, DirectiveHandler.validateStringArgument(TERM_STRING_ARGUMENT, "string argument")); } - - @Test(expected = ParsingException.class) - public void validateStringArgument_iriArgument_throws() throws ParsingException { - DirectiveHandler.validateStringArgument(IRI_ARGUMENT, "string argument"); - } - - @Test(expected = ParsingException.class) - public void validateStringArgument_termArgument_throws() throws ParsingException { - DirectiveHandler.validateStringArgument(TERM_ARGUMENT, "string argument"); - } - - @Test - public void validateIriArgument_iriArgument_succeeds() throws ParsingException { - assertEquals(IRI, 
DirectiveHandler.validateIriArgument(IRI_ARGUMENT, "iri argument")); - } - - @Test(expected = ParsingException.class) - public void validateIriArgument_StringArgument_throws() throws ParsingException { - DirectiveHandler.validateIriArgument(STRING_ARGUMENT, "iri argument"); - } - + @Test(expected = ParsingException.class) - public void validateIriArgument_termArgument_throws() throws ParsingException { - DirectiveHandler.validateIriArgument(TERM_ARGUMENT, "iri argument"); + public void validateStringArgument_stringArgument_throws() throws ParsingException { + assertEquals(STRING, DirectiveHandler.validateStringArgument(TERM_INT_ARGUMENT, "string argument")); } @Test public void validateTermArgument_termArgument_succeeds() throws ParsingException { - assertEquals(TERM, DirectiveHandler.validateTermArgument(TERM_ARGUMENT, "term argument")); - } - - @Test(expected = ParsingException.class) - public void validateTermArgument_stringArgument_throws() throws ParsingException { - DirectiveHandler.validateTermArgument(STRING_ARGUMENT, "term argument"); - } - - @Test(expected = ParsingException.class) - public void validateTermArgument_iriArgument_throws() throws ParsingException { - DirectiveHandler.validateTermArgument(IRI_ARGUMENT, "term argument"); + assertEquals(STRINGTERM, DirectiveHandler.validateTermArgument(TERM_STRING_ARGUMENT, "term argument")); } @Test public void validateFilenameArgument_filename_succeeds() throws ParsingException { - assertEquals(new File(STRING), DirectiveHandler.validateFilenameArgument(STRING_ARGUMENT, "filename argument")); + assertEquals(new File(STRING), + DirectiveHandler.validateFilenameArgument(TERM_STRING_ARGUMENT, "filename argument")); } @Test public void validateFilenameArgument_invalidFilename_throws() throws ParsingException { - DirectiveHandler.validateFilenameArgument(Argument.string(STRING + "-nonexistant"), + DirectiveHandler.validateFilenameArgument(Argument + .term(Expressions.makeDatatypeConstant(STRING + "-nonexistent", PrefixDeclarationRegistry.XSD_STRING)), "filename argument"); } - @Test - public void validateUrlArgument_url_succeeds() throws ParsingException, MalformedURLException { - assertEquals(IRI.toURL(), DirectiveHandler.validateUrlArgument(IRI_ARGUMENT, "urls argument")); - } - - @Test(expected = ParsingException.class) - public void validateUrlArgument_invalidUrl_throws() throws ParsingException { - DirectiveHandler.validateUrlArgument(Argument.iri(URI.create("example://test")), "url argument"); - } - } From aca91129a212d83ec6ef3343995af92990ee841b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 18 Aug 2020 17:14:47 +0200 Subject: [PATCH 0649/1003] improved output --- .../rulewerk/commands/ReasonCommandInterpreter.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index 13753e8f4..6ecfd8944 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -34,6 +34,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException("This command supports no arguments."); } + interpreter.getOut().println("Loading and materializing inferences ..."); + Timer timer = new Timer("reasoning"); timer.start(); try { @@ 
-42,8 +44,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException(e.getMessage(), e); } timer.stop(); - interpreter.getOut() - .println("Loading and materialization finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.getOut().println("... finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); } @Override @@ -55,5 +56,5 @@ public String getHelp(String commandName) { public String getSynopsis() { return "load data and compute conclusions from knowledge base"; } - + } From 49cabe7dfa94c9141a300c506a0d908c53c5b6e9 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 18 Aug 2020 18:07:00 +0200 Subject: [PATCH 0650/1003] support source and prefix like command arguments --- .../rulewerk/core/model/api/Argument.java | 17 +++++++++ .../rulewerk/parser/javacc/JavaCCParser.jj | 23 +++++++----- .../rulewerk/parser/CommandParserTest.java | 35 +++++++++++++------ 3 files changed, 57 insertions(+), 18 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java index 27604edb3..dfbd0c771 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java @@ -23,6 +23,8 @@ import java.util.Optional; import java.util.function.Function; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + /** * A tagged union representing the possible types allowed to appear as arguments * in commands and parser directives. @@ -104,6 +106,11 @@ public boolean equals(Object other) { public int hashCode() { return 47 * value.hashCode(); } + + @Override + public String toString() { + return value.toString(); + } }; } @@ -139,6 +146,11 @@ public boolean equals(Object other) { public int hashCode() { return 53 * value.hashCode(); } + + @Override + public String toString() { + return value.toString(); + } }; } @@ -175,6 +187,11 @@ public boolean equals(Object other) { public int hashCode() { return 59 * value.hashCode(); } + + @Override + public String toString() { + return value.toString(); + } }; } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index ef20b419f..e73b4b222 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -160,16 +160,23 @@ KnowledgeBase directive() throws PrefixDeclarationException : { } Command command() throws PrefixDeclarationException : { - Token name; - List< Argument > arguments; + Token name, pn, arity; + LinkedList< Argument > arguments; + String predicateName; + } { name = < CUSTOM_DIRECTIVE > - ( arguments = Arguments() < DOT > { - return new Command(name.image,arguments); - } | - < DOT > { - return new Command(name.image, new LinkedList< Argument >()); - } ) + ( LOOKAHEAD(predicateName() < ARITY >) predicateName = predicateName() arity = < ARITY > < COLON > arguments = Arguments() < DOT > { + arguments.addFirst(Argument.term(Expressions.makeDatatypeConstant(predicateName + "[" + arity.image + "]:", PrefixDeclarationRegistry.XSD_STRING))); + return new Command(name.image,arguments); + } + | arguments = Arguments() < DOT > { return new Command(name.image,arguments); } + | pn = 
< PNAME_NS > arguments = Arguments() < DOT > { + arguments.addFirst(Argument.term(Expressions.makeDatatypeConstant(pn.image, PrefixDeclarationRegistry.XSD_STRING))); + return new Command(name.image,arguments); + } + | < DOT > { return new Command(name.image, new LinkedList< Argument >()); } + ) } void statement() throws PrefixDeclarationException : { diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java index 027cff739..3381f05cc 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java @@ -22,9 +22,12 @@ import static org.junit.Assert.*; +import java.net.URI; +import java.net.URISyntaxException; + import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; public class CommandParserTest { @@ -41,14 +44,26 @@ public void parseCommand() throws ParsingException { assertTrue(command.getArguments().get(3).fromPositiveLiteral().isPresent()); assertTrue(command.getArguments().get(4).fromTerm().isPresent()); } + + @Test + public void parsePrefix() throws ParsingException, URISyntaxException { + String input = "@myprefix wdqs: ."; + Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); + assertEquals("wdqs:", Terms.extractString(command.getArguments().get(0).fromTerm().get())); + assertEquals(new URI("https://query.wikidata.org/"), Terms.extractIri(command.getArguments().get(1).fromTerm().get())); + } -// @Test -// public void parseCommandTest() throws ParsingException { -// String input = "@myprefix wdqs: ."; -//// String input = "@mysource diseaseId[2]: sparql(wdqs:sparql, \"disease,doid\", \"?disease wdt:P699 ?doid .\") ."; -// Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); -// for (Argument argument : command.getArguments()) { -// System.out.println("-"); -// } -// } + @Test + public void parseSourceDeclaration() throws ParsingException, URISyntaxException { + String input = "@mysource diseaseId[2]: 123 ."; + Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); + assertEquals("diseaseId[2]:", Terms.extractString(command.getArguments().get(0).fromTerm().get())); + assertEquals(123, Terms.extractInt(command.getArguments().get(1).fromTerm().get())); + } } From 171af2fe511815e42a4cf8dbebc4c0efe706cc60 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 01:13:00 +0200 Subject: [PATCH 0651/1003] added shell code --- pom.xml | 2 + rulewerk-client/pom.xml | 32 +++++- .../rulewerk/client/shell/CommandReader.java | 90 ++++++++++++++++ .../client/shell/DefaultConfiguration.java | 60 +++++++++++ .../rulewerk/client/shell/PromptProvider.java | 28 +++++ .../client/shell/RulewerkApplication.java | 40 +++++++ .../rulewerk/client/shell/Shell.java | 
102 ++++++++++++++++++ .../commands/ExitCommandInterpreter.java | 66 ++++++++++++ 8 files changed, 415 insertions(+), 5 deletions(-) create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java diff --git a/pom.xml b/pom.xml index 17fe39ba5..bd2ca0a0d 100644 --- a/pom.xml +++ b/pom.xml @@ -87,6 +87,8 @@ 1.3.1 4.0.4 3.2.0 + 3.16.0 + 1.18 diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index 64f043d72..98bca4d59 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -31,11 +31,16 @@ rulewerk-parser ${project.version} - - ${project.groupId} - rulewerk-vlog - ${project.version} - + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + ${project.groupId} + rulewerk-commands + ${project.version} + org.slf4j slf4j-log4j12 @@ -51,6 +56,23 @@ maven-shade-plugin ${shade.version} + + + + org.jline + jline + ${jline.version} + + + org.fusesource.jansi + jansi + ${jansi.version} + + + org.jline + jline-terminal-jansi + ${jline.version} + diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java new file mode 100644 index 000000000..ea34fc323 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -0,0 +1,90 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; +import org.jline.utils.AttributedString; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +public class CommandReader { + + public CommandReader(final LineReader lineReader, final PromptProvider promptProvider) { + super(); + this.lineReader = lineReader; + this.promptProvider = promptProvider; + } + + private final LineReader lineReader; + + private final PromptProvider promptProvider; + + public Command readCommand() { + final String readLine; + try { + final AttributedString prompt = this.promptProvider.getPrompt(); + readLine = this.lineReader.readLine(prompt.toAnsi(this.lineReader.getTerminal())); + + } catch (final UserInterruptException e) { + if (e.getPartialLine().isEmpty()) { + // Exit request from user CTRL+C + return ExitCommandInterpreter.EXIT_COMMAND; + } else { + // TODO maybe create empty command + return null; + } + } + // TODO can readLIne be null? + + // TODO does it trim trailing spaces? + if (ExitCommandName.isExitCommand(readLine)) { + return ExitCommandInterpreter.EXIT_COMMAND; + } + + try { + return RuleParser.parseCommand(readLine); + } catch (final ParsingException e) { + // FIXME do I need to flush terminal? + // TODO improve error message + this.lineReader.getTerminal().writer().println("Command cannot be parsed: " + e.getMessage()); + // return Input.EMPTY; + // TODO maybe create empty command + return null; + } + } + +// /** +// * Sanitize the buffer input given the customizations applied to the JLine +// * parser (e.g. support for line continuations, etc.) +// */ +// static List sanitizeInput(List words) { +// words = words.stream().map(s -> s.replaceAll("^\\n+|\\n+$", "")) // CR at beginning/end of line introduced by +// // backslash continuation +// .map(s -> s.replaceAll("\\n+", " ")) // CR in middle of word introduced by return inside a quoted string +// .collect(Collectors.toList()); +// return words; +// } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java new file mode 100644 index 000000000..2b4085ac5 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -0,0 +1,60 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.jline.reader.LineReader; +import org.jline.reader.LineReaderBuilder; +import org.jline.terminal.Terminal; +import org.jline.terminal.TerminalBuilder; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; + +public final class DefaultConfiguration { + + private DefaultConfiguration() { + } + + public static PromptProvider buildPromptProvider() { + return () -> new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); + } + + public static LineReader buildLineReader(final Terminal terminal) { + final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) + .appName("Rulewerk Shell") + // .expander(expander()) + // .completer(buildCompleter()) + // .history(buildHistory()) + // .highlighter(buildHighlighter()) + ; + + final LineReader lineReader = lineReaderBuilder.build(); + lineReader.unsetOpt(LineReader.Option.INSERT_TAB); // This allows completion on an empty buffer, rather than + // inserting a tab + return lineReader; + } + + public static Terminal buildTerminal() throws IOException { + return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java new file mode 100644 index 000000000..ff5fd6ea4 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java @@ -0,0 +1,28 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.jline.utils.AttributedString; + +public interface PromptProvider { + + AttributedString getPrompt(); +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java new file mode 100644 index 000000000..4cee43296 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java @@ -0,0 +1,40 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; + +public class RulewerkApplication { + + public static void main(final String[] args) throws IOException { + final Terminal terminal = DefaultConfiguration.buildTerminal(); + final Shell shell = new Shell(terminal); + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); + final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); + final CommandReader commandReader = new CommandReader(lineReader, promptProvider); + + shell.run(commandReader); + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java new file mode 100644 index 000000000..41f309f20 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -0,0 +1,102 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.PrintStream; + +import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.slf4j.Logger; + +public class Shell { + + private final Terminal terminal; + + private final Interpreter interpreter; + + public Shell(final Terminal terminal) { + this.terminal = terminal; + this.interpreter = this.initializeInterpreter(); + } + + private Interpreter initializeInterpreter() { + // FIXME connect terminal writer +// final PrintStream out = this.terminal.writer().; + final PrintStream out = System.out; + + // FIXME connect logger; + final Logger logger = null; + // TODO reasoner initial KB from args + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = new VLogReasoner(knowledgeBase); + final Interpreter interpreter = new Interpreter(reasoner, out, logger); + + for (final ExitCommandName exitCommandName : ExitCommandName.values()) { + interpreter.registerCommandInterpreter(exitCommandName.toString(), new ExitCommandInterpreter()); + } + + return interpreter; + } + + public void run(final CommandReader commandReader) { + while (true) { + final Command command; + try { + command = commandReader.readCommand(); + } catch (final Exception e) { + // TODO: handle exception + continue; + } + + if (command != null) { + try { + this.interpreter.runCommand(command); + } catch (final 
CommandExecutionException e) { + // TODO: handle exception + continue; + } + + if (ExitCommandName.isExitCommand(command.getName())) { + break; + } + } + } + } + +// @Override +// public void handleResult(final Object result) { +// this.terminal.writer().println(result); +// this.terminal.writer().flush(); +// } + +// @Override +// public void handleResult(final AttributedCharSequence result) { +// this.terminal.writer().println(result.toAnsi(this.terminal)); +// this.terminal.writer().flush(); +// } +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java new file mode 100644 index 000000000..13eb81671 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -0,0 +1,66 @@ +package org.semanticweb.rulewerk.client.shell.commands; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.ArrayList; + +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.CommandInterpreter; +import org.semanticweb.rulewerk.core.model.api.Command; + +public class ExitCommandInterpreter implements CommandInterpreter { + + public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>()); + + public static enum ExitCommandName + { + exit, quit; + + public static boolean isExitCommand(final String commandName) { + for(final ExitCommandName name: values()) { + if (name.toString().equals(commandName)) { + return true; + } + } + return false; + } + } + + @Override + public String getHelp(final String commandName) { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getSynopsis() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void run(final Command command, final org.semanticweb.rulewerk.commands.Interpreter interpreter) + throws CommandExecutionException { + // TODO Auto-generated method stub + interpreter.getOut().println("Quiting rulewerk."); + } + +} From 9cc7dd4c8d55f9681d937b238a1cbe9045c88160 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 09:43:39 +0200 Subject: [PATCH 0652/1003] Commands to set sources and prefixes Command parsing needs to use KB prefixes for this to work --- .../rulewerk/commands/Interpreter.java | 62 +++++++++++++- .../commands/SetPrefixCommandInterpreter.java | 53 ++++++++++++ .../commands/SetSourceCommandInterpreter.java | 84 +++++++++++++++++++ .../model/api/PrefixDeclarationRegistry.java | 7 ++ .../rulewerk/core/model/api/Terms.java | 39 +++++---- .../AbstractPrefixDeclarationRegistry.java | 5 ++ .../rulewerk/core/reasoner/KnowledgeBase.java | 12 ++- 7 files changed, 243 insertions(+), 19 deletions(-) create mode 100644 
rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 959a0d6d6..404e98e6d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -25,21 +25,23 @@ import java.util.List; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.slf4j.Logger; +import org.semanticweb.rulewerk.parser.ParserConfiguration; public class Interpreter { final Reasoner reasoner; final PrintStream out; - final Logger logger; + final ParserConfiguration parserConfiguration; final HashMap commandInterpreters = new HashMap<>(); - public Interpreter(Reasoner reasoner, PrintStream out, Logger logger) { + public Interpreter(Reasoner reasoner, PrintStream out, ParserConfiguration parserConfiguration) { this.reasoner = reasoner; this.out = out; - this.logger = logger; + this.parserConfiguration = parserConfiguration; registerDefaultCommandInterpreters(); } @@ -68,6 +70,10 @@ public void runCommand(Command command) throws CommandExecutionException { public Reasoner getReasoner() { return reasoner; } + + public ParserConfiguration getParserConfiguration() { + return parserConfiguration; + } public PrintStream getOut() { return out; @@ -79,6 +85,54 @@ private void registerDefaultCommandInterpreters() { registerCommandInterpreter("query", new QueryCommandInterpreter()); registerCommandInterpreter("reason", new ReasonCommandInterpreter()); registerCommandInterpreter("load", new LoadCommandInterpreter()); + registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); + registerCommandInterpreter("setsource", new SetSourceCommandInterpreter()); + } + + /** + * Validate that the correct number of arguments was passed to a command. 
+ * + * @param command Command to validate + * @param number expected number of parameters + * @throws CommandExecutionException if the number is not correct + */ + public static void validateArgumentCount(Command command, int number) throws CommandExecutionException { + if (command.getArguments().size() != number) { + throw new CommandExecutionException("This command requires exactly " + number + " argument(s), but " + + command.getArguments().size() + " were given."); + } + } + + private static CommandExecutionException getArgumentTypeError(int index, String expectedType, + String parameterName) { + return new CommandExecutionException( + "Argument at position " + index + " needs to be of type " + expectedType + " (" + parameterName + ")."); + } + + public static String extractStringArgument(Command command, int index, String parameterName) + throws CommandExecutionException { + try { + return Terms.extractString(command.getArguments().get(index).fromTerm() + .orElseThrow(() -> getArgumentTypeError(index, "string", parameterName))); + } catch (IllegalArgumentException e) { + throw getArgumentTypeError(index, "string", parameterName); + } + } + + public static String extractNameArgument(Command command, int index, String parameterName) + throws CommandExecutionException { + try { + return Terms.extractName(command.getArguments().get(index).fromTerm() + .orElseThrow(() -> getArgumentTypeError(index, "constant", parameterName))); + } catch (IllegalArgumentException e) { + throw getArgumentTypeError(index, "constant", parameterName); + } + } + + public static PositiveLiteral extractPositiveLiteralArgument(Command command, int index, String parameterName) + throws CommandExecutionException { + return command.getArguments().get(index).fromPositiveLiteral() + .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java new file mode 100644 index 000000000..7b9427d02 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java @@ -0,0 +1,53 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+
+import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException;
+import org.semanticweb.rulewerk.core.model.api.Command;
+
+public class SetPrefixCommandInterpreter implements CommandInterpreter {
+
+	@Override
+	public void run(Command command, Interpreter interpreter) throws CommandExecutionException {
+		Interpreter.validateArgumentCount(command, 2);
+		String prefixName = Interpreter.extractStringArgument(command, 0, "prefix name");
+		String prefixIri = Interpreter.extractNameArgument(command, 1, "prefix IRI");
+
+		interpreter.getReasoner().getKnowledgeBase().getPrefixDeclarationRegistry().unsetPrefix(prefixName);
+		try {
+			interpreter.getReasoner().getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri(prefixName,
+					prefixIri);
+		} catch (PrefixDeclarationException e) { // practically impossible
+			throw new CommandExecutionException("Setting prefix failed: " + e.getMessage());
+		}
+	}
+
+	@Override
+	public String getHelp(String commandName) {
+		return "Usage: @" + commandName + " <prefixName>: <prefixIri> .";
+	}
+
+	@Override
+	public String getSynopsis() {
+		return "set a prefix to abbreviate long IRIs (only affects future inputs)";
+	}
+
+}
diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java
new file mode 100644
index 000000000..416084b38
--- /dev/null
+++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java
@@ -0,0 +1,84 @@
+package org.semanticweb.rulewerk.commands;
+
+/*-
+ * #%L
+ * Rulewerk command execution support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+public class SetSourceCommandInterpreter implements CommandInterpreter {
+
+	@Override
+	public void run(Command command, Interpreter interpreter) throws CommandExecutionException {
+		Interpreter.validateArgumentCount(command, 2);
+		String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]");
+		PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1,
+				"source declaration");
+
+		String predicateName;
+		int arity;
+		try {
+			int openBracket = predicateDeclaration.indexOf('[');
+			int closeBracket = predicateDeclaration.indexOf(']');
+			predicateName = predicateDeclaration.substring(0, openBracket);
+			String arityString = predicateDeclaration.substring(openBracket + 1, closeBracket);
+			arity = Integer.parseInt(arityString);
+		} catch (IndexOutOfBoundsException | NumberFormatException e) {
+			throw new CommandExecutionException(
+					"Predicate declaration must have the format \"predicateName[number]\" but was "
+							+ predicateDeclaration);
+		}
+		Predicate predicate = Expressions.makePredicate(predicateName, arity);
+
+		DataSource dataSource;
+		try {
+			dataSource = interpreter.getParserConfiguration()
+					.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration);
+		} catch (ParsingException e) {
+			throw new CommandExecutionException("Could not parse source declaration: " + e.getMessage());
+		}
+
+		if (dataSource.getRequiredArity().isPresent()) {
+			Integer requiredArity = dataSource.getRequiredArity().get();
+			if (arity != requiredArity) {
+				throw new CommandExecutionException(
+						"Invalid arity " + arity + " for data source, " + "expected " + requiredArity + ".");
+			}
+		}
+
+		interpreter.getReasoner().getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource));
+	}
+
+	@Override
+	public String getHelp(String commandName) {
+		return "Usage: @" + commandName + " <predicateName>[<arity>]: <source declaration> .";
+	}
+
+	@Override
+	public String getSynopsis() {
+		return "define an external data source for a predicate";
+	}
+
+}
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java
index 49cc7abe3..a3ac69b9c 100644
--- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java
@@ -85,6 +85,13 @@ public interface PrefixDeclarationRegistry extends Iterableprefixed * name into an absolute IRI.
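A minimal caller-side sketch of the command interpreters introduced above, not part of any patch in this series: it only uses APIs that appear in these diffs (RuleParser.parseCommand, the Interpreter constructor taking a ParserConfiguration, Interpreter.runCommand, VLogReasoner). The class name SetPrefixSketch, the no-argument ParserConfiguration constructor, and the example command string are illustrative assumptions.

import org.semanticweb.rulewerk.commands.Interpreter;
import org.semanticweb.rulewerk.core.model.api.Command;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.parser.ParserConfiguration;
import org.semanticweb.rulewerk.parser.RuleParser;
import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;

public class SetPrefixSketch {
	public static void main(String[] args) throws Exception {
		final KnowledgeBase knowledgeBase = new KnowledgeBase();
		try (final VLogReasoner reasoner = new VLogReasoner(knowledgeBase)) {
			// Constructor as changed in this patch; the no-argument ParserConfiguration
			// constructor is assumed here for illustration.
			final Interpreter interpreter = new Interpreter(reasoner, System.out, new ParserConfiguration());
			// "setprefix" is registered by registerDefaultCommandInterpreters() in the diff above;
			// the command text follows the custom-directive grammar from patch 0650.
			final Command setPrefix = RuleParser.parseCommand("@setprefix ex: <https://example.org/> .");
			interpreter.runCommand(setPrefix);
		}
	}
}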
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java index ba964c21c..8e8e05ac2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java @@ -132,6 +132,23 @@ public static String extractString(Term term) { "Term " + term.toString() + " is not a datatype constant of type xsd:string."); } + /** + * Returns the name of an abstract term, and throws an exception for all other + * cases. + * + * @param term the term from which the name is to be extracted + * @return extracted name + * @throws IllegalArgumentException if the given term is not an abstract + * constant + */ + public static String extractName(Term term) { + if (term.getType() == TermType.ABSTRACT_CONSTANT) { + return term.getName(); + } else { + throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); + } + } + /** * Returns the IRI representation of an abstract term, and throws an exception * for all other cases. @@ -142,14 +159,11 @@ public static String extractString(Term term) { * constant or cannot be parsed as an IRI */ public static URI extractIri(Term term) { - if (term.getType() == TermType.ABSTRACT_CONSTANT) { - try { - return new URI(term.getName()); - } catch (URISyntaxException e) { - throw new IllegalArgumentException(e); - } + try { + return new URI(extractName(term)); + } catch (URISyntaxException e) { + throw new IllegalArgumentException(e); } - throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); } /** @@ -162,14 +176,11 @@ public static URI extractIri(Term term) { * constant or cannot be parsed as a URL */ public static URL extractUrl(Term term) { - if (term.getType() == TermType.ABSTRACT_CONSTANT) { - try { - return new URL(term.getName()); - } catch (MalformedURLException e) { - throw new IllegalArgumentException(e); - } + try { + return new URL(extractName(term)); + } catch (MalformedURLException e) { + throw new IllegalArgumentException(e); } - throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); } /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 2bb4e72a7..9f584fcec 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -64,6 +64,11 @@ public String getPrefixIri(String prefixName) throws PrefixDeclarationException return prefixes.get(prefixName); } + + @Override + public void unsetPrefix(String prefixName) { + prefixes.remove(prefixName); + } @Override public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index ad03ba16e..7cfe63c3a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ 
-499,6 +499,15 @@ public void mergePrefixDeclarations(PrefixDeclarationRegistry prefixDeclarationR this.prefixDeclarationRegistry.mergePrefixDeclarations(prefixDeclarationRegistry); } + /** + * Returns the {@link PrefixDeclarationRegistry} used by this knowledge base. + * + * @return registry for prefix declarations + */ + public PrefixDeclarationRegistry getPrefixDeclarationRegistry() { + return this.prefixDeclarationRegistry; + } + /** * Return the base IRI. * @@ -564,7 +573,8 @@ public String unresolveAbsoluteIri(String iri) { * * @param stream the {@link OutputStream} to serialise to. * - * @throws IOException if an I/O error occurs while writing to given output stream + * @throws IOException if an I/O error occurs while writing to given output + * stream */ public void writeKnowledgeBase(OutputStream stream) throws IOException { stream.write(Serializer.getBaseAndPrefixDeclarations(this).getBytes()); From 992e64f1f8d9c8863c8f0d715f656d99440cc6cc Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 09:44:05 +0200 Subject: [PATCH 0653/1003] Improved error messages --- .../commands/AssertCommandInterpreter.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index 0a4d0075e..759f5a059 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -30,8 +30,7 @@ public class AssertCommandInterpreter implements CommandInterpreter { @Override - public void run(Command command, Interpreter interpreter) - throws CommandExecutionException { + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { for (Argument argument : command.getArguments()) { if (argument.fromPositiveLiteral().isPresent()) { @@ -46,16 +45,17 @@ public void run(Command command, Interpreter interpreter) } else if (argument.fromRule().isPresent()) { interpreter.getReasoner().getKnowledgeBase().addStatement(argument.fromRule().get()); } else { - throw new CommandExecutionException("Only facts and rules can be asserted."); + throw new CommandExecutionException( + "Only facts and rules can be asserted. 
Encountered " + argument.toString()); } } } - + @Override public String getHelp(String commandName) { - return "Usage: @" + commandName + " ()+ .\n" + - " fact or rule: statement(s) to be added to the knowledge base\n" + - "Reasoning needs to be invoked after finishing addition of statements."; + return "Usage: @" + commandName + " ()+ .\n" + + " fact or rule: statement(s) to be added to the knowledge base\n" + + "Reasoning needs to be invoked after finishing addition of statements."; } @Override From bdf5ba76d017745470c51337fca7d230f32ad967 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 09:48:38 +0200 Subject: [PATCH 0654/1003] Update Interpreter construction --- .../java/org/semanticweb/rulewerk/client/shell/Shell.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 41f309f20..ef7c49ef4 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -30,8 +30,9 @@ import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -import org.slf4j.Logger; public class Shell { @@ -49,12 +50,11 @@ private Interpreter initializeInterpreter() { // final PrintStream out = this.terminal.writer().; final PrintStream out = System.out; - // FIXME connect logger; - final Logger logger = null; // TODO reasoner initial KB from args final KnowledgeBase knowledgeBase = new KnowledgeBase(); final Reasoner reasoner = new VLogReasoner(knowledgeBase); - final Interpreter interpreter = new Interpreter(reasoner, out, logger); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); for (final ExitCommandName exitCommandName : ExitCommandName.values()) { interpreter.registerCommandInterpreter(exitCommandName.toString(), new ExitCommandInterpreter()); From 4d865026ee5c601c5d5b82d46ee1fd94dacb8bf8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 10:49:00 +0200 Subject: [PATCH 0655/1003] Improve command handling in shell --- .../rulewerk/client/shell/CommandReader.java | 52 ++++++++------ .../client/shell/RulewerkApplication.java | 28 +++++++- .../rulewerk/client/shell/Shell.java | 71 ++++--------------- .../commands/ExitCommandInterpreter.java | 16 +++-- .../rulewerk/commands/Interpreter.java | 34 +++++++++ 5 files changed, 116 insertions(+), 85 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index ea34fc323..9877ea6fa 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -24,53 +24,63 @@ import org.jline.reader.UserInterruptException; import org.jline.utils.AttributedString; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; -import 
org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; +import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.RuleParser; public class CommandReader { - public CommandReader(final LineReader lineReader, final PromptProvider promptProvider) { - super(); + private final LineReader lineReader; + private final PromptProvider promptProvider; + private final Interpreter interpreter; + + public CommandReader(final LineReader lineReader, final PromptProvider promptProvider, + final Interpreter interpreter) { + super(); // FIXME: there is no superclass? this.lineReader = lineReader; this.promptProvider = promptProvider; + this.interpreter = interpreter; } - private final LineReader lineReader; - - private final PromptProvider promptProvider; - + /** + * Reads a command from the prompt and returns a corresponding {@link Command} + * object. If no command should be executed, null is returned. Some effort is + * made to interpret mistyped commands by adding @ and . before and after the + * input, if forgotten. + * + * @return command or null + */ public Command readCommand() { - final String readLine; + String readLine; try { final AttributedString prompt = this.promptProvider.getPrompt(); readLine = this.lineReader.readLine(prompt.toAnsi(this.lineReader.getTerminal())); - } catch (final UserInterruptException e) { if (e.getPartialLine().isEmpty()) { // Exit request from user CTRL+C return ExitCommandInterpreter.EXIT_COMMAND; } else { - // TODO maybe create empty command - return null; + return null; // used as empty command } } - // TODO can readLIne be null? - // TODO does it trim trailing spaces? - if (ExitCommandName.isExitCommand(readLine)) { - return ExitCommandInterpreter.EXIT_COMMAND; + readLine = readLine.trim(); + if ("".equals(readLine)) { + return null; + } + if (readLine.charAt(0) != '@') { + readLine = "@" + readLine; + } + if (readLine.charAt(readLine.length() - 1) != '.') { + readLine = readLine + "."; } try { - return RuleParser.parseCommand(readLine); + return interpreter.parseCommand(readLine); } catch (final ParsingException e) { // FIXME do I need to flush terminal? 
- // TODO improve error message - this.lineReader.getTerminal().writer().println("Command cannot be parsed: " + e.getMessage()); - // return Input.EMPTY; - // TODO maybe create empty command + this.lineReader.getTerminal().writer() + .println("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); return null; } } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java index 4cee43296..b01085223 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java @@ -21,20 +21,44 @@ */ import java.io.IOException; +import java.io.PrintStream; import org.jline.reader.LineReader; import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; public class RulewerkApplication { public static void main(final String[] args) throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); - final Shell shell = new Shell(terminal); + final Interpreter interpreter = initializeInterpreter(terminal); + + final Shell shell = new Shell(interpreter); + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); - final CommandReader commandReader = new CommandReader(lineReader, promptProvider); + final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); shell.run(commandReader); } + private static Interpreter initializeInterpreter(Terminal terminal) { + // FIXME connect terminal writer +// final PrintStream out = terminal.writer().; + final PrintStream out = System.out; + + // TODO reasoner initial KB from args + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = new VLogReasoner(knowledgeBase); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); + + return interpreter; + } + } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index ef7c49ef4..2555f3d50 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -1,75 +1,34 @@ package org.semanticweb.rulewerk.client.shell; -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.PrintStream; - -import org.jline.terminal.Terminal; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.CommandInterpreter; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; public class Shell { - private final Terminal terminal; - private final Interpreter interpreter; + boolean running; - public Shell(final Terminal terminal) { - this.terminal = terminal; - this.interpreter = this.initializeInterpreter(); - } - - private Interpreter initializeInterpreter() { - // FIXME connect terminal writer -// final PrintStream out = this.terminal.writer().; - final PrintStream out = System.out; - - // TODO reasoner initial KB from args - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - final Reasoner reasoner = new VLogReasoner(knowledgeBase); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); + public Shell(final Interpreter interpreter) { + this.interpreter = interpreter; + CommandInterpreter exitCommandInterpreter = new ExitCommandInterpreter(this); for (final ExitCommandName exitCommandName : ExitCommandName.values()) { - interpreter.registerCommandInterpreter(exitCommandName.toString(), new ExitCommandInterpreter()); + interpreter.registerCommandInterpreter(exitCommandName.toString(), exitCommandInterpreter); } - - return interpreter; } public void run(final CommandReader commandReader) { - while (true) { + running = true; + while (running) { final Command command; try { command = commandReader.readCommand(); } catch (final Exception e) { - // TODO: handle exception + interpreter.getOut().println("Unexpected error: " + e.getMessage()); continue; } @@ -77,15 +36,15 @@ public void run(final CommandReader commandReader) { try { this.interpreter.runCommand(command); } catch (final CommandExecutionException e) { - // TODO: handle exception - continue; - } - - if (ExitCommandName.isExitCommand(command.getName())) { - break; + interpreter.getOut().println("Error: " + e.getMessage()); } } } + interpreter.getOut().println("Rulewerk shell is stopped. 
Bye."); + } + + public void exitShell() { + this.running = false; } // @Override diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index 13eb81671..1b7dc4d4c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -22,6 +22,7 @@ import java.util.ArrayList; +import org.semanticweb.rulewerk.client.shell.Shell; import org.semanticweb.rulewerk.commands.CommandExecutionException; import org.semanticweb.rulewerk.commands.CommandInterpreter; import org.semanticweb.rulewerk.core.model.api.Command; @@ -43,24 +44,27 @@ public static boolean isExitCommand(final String commandName) { return false; } } + + final Shell shell; + + public ExitCommandInterpreter(Shell shell) { + this.shell = shell; + } @Override public String getHelp(final String commandName) { - // TODO Auto-generated method stub - return null; + return "Usage: " + commandName + "."; } @Override public String getSynopsis() { - // TODO Auto-generated method stub - return null; + return "exit Rulewerk shell"; } @Override public void run(final Command command, final org.semanticweb.rulewerk.commands.Interpreter interpreter) throws CommandExecutionException { - // TODO Auto-generated method stub - interpreter.getOut().println("Quiting rulewerk."); + this.shell.exitShell(); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 404e98e6d..ddfc0b9c3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -1,5 +1,8 @@ package org.semanticweb.rulewerk.commands; +import java.io.ByteArrayInputStream; +import java.io.InputStream; + /*- * #%L * Rulewerk Core Components @@ -23,12 +26,18 @@ import java.io.PrintStream; import java.util.HashMap; import java.util.List; +import java.util.Map.Entry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; +import org.semanticweb.rulewerk.parser.javacc.ParseException; +import org.semanticweb.rulewerk.parser.javacc.TokenMgrError; public class Interpreter { @@ -66,6 +75,31 @@ public void runCommand(Command command) throws CommandExecutionException { throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); } } + + public Command parseCommand(String commandString) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes()); + final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); + localParser.setParserConfiguration(parserConfiguration); + + // Copy prefixes from KB: + try { + localParser.getPrefixDeclarationRegistry().setBaseIri(reasoner.getKnowledgeBase().getBaseIri()); + for (Entry 
prefix : reasoner.getKnowledgeBase().getPrefixDeclarationRegistry()) { + localParser.getPrefixDeclarationRegistry().setPrefixIri(prefix.getKey(), prefix.getValue()); + } + } catch (PrefixDeclarationException e) { // unlikely! + throw new RuntimeException(e); + } + + Command result; + try { + result = localParser.command(); + localParser.ensureEndOfInput(); + } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { + throw new ParsingException("Exception while parsing command.", e); + } + return result; + } public Reasoner getReasoner() { return reasoner; From d0fed2f1dc0dc3ddc7d4640b545699fbbd8c79c8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 11:10:36 +0200 Subject: [PATCH 0656/1003] properly set encoding for String-based parsing --- .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 3 ++- .../java/org/semanticweb/rulewerk/parser/RuleParser.java | 5 +++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index ddfc0b9c3..3a86661a3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -24,6 +24,7 @@ */ import java.io.PrintStream; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; @@ -77,7 +78,7 @@ public void runCommand(Command command) throws CommandExecutionException { } public Command parseCommand(String commandString) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes()); + final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes(StandardCharsets.UTF_8)); final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); localParser.setParserConfiguration(parserConfiguration); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index 124bb5381..12d186684 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -22,6 +22,7 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.List; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; @@ -169,8 +170,8 @@ T parse(final JavaCCParser parser) */ static T parseSyntaxFragment(final String input, SyntaxFragmentParser parserAction, final String syntaxFragmentType, final ParserConfiguration parserConfiguration) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - final JavaCCParser localParser = new JavaCCParser(inputStream, DEFAULT_STRING_ENCODING); + final InputStream inputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8)); + final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); if (parserConfiguration != null) { localParser.setParserConfiguration(parserConfiguration); From 394581c06bcb3aa1023b68156d8d2ed8b2290d50 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 11:12:11 +0200 Subject: [PATCH 0657/1003] license header --- .../rulewerk/client/shell/Shell.java | 20 +++++++++++++++++++ 1 file changed, 
20 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 2555f3d50..3075657ea 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; import org.semanticweb.rulewerk.commands.CommandExecutionException; From 44755a11bac2a2be0fc251bfb2e141bab3b7ff59 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 12:28:09 +0200 Subject: [PATCH 0658/1003] use PrintWriter instead of PrintStream --- .../rulewerk/client/shell/RulewerkApplication.java | 3 +-- .../semanticweb/rulewerk/commands/Interpreter.java | 12 ++++++------ 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java index b01085223..50b15d594 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java @@ -50,13 +50,12 @@ public static void main(final String[] args) throws IOException { private static Interpreter initializeInterpreter(Terminal terminal) { // FIXME connect terminal writer // final PrintStream out = terminal.writer().; - final PrintStream out = System.out; // TODO reasoner initial KB from args final KnowledgeBase knowledgeBase = new KnowledgeBase(); final Reasoner reasoner = new VLogReasoner(knowledgeBase); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); + final Interpreter interpreter = new Interpreter(reasoner, terminal.writer(), parserConfiguration); return interpreter; } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 3a86661a3..2de5ea02a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -23,7 +23,7 @@ * #L% */ -import java.io.PrintStream; +import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.List; @@ -43,12 +43,12 @@ public class Interpreter { final Reasoner reasoner; - final 
PrintStream out; + final PrintWriter out; final ParserConfiguration parserConfiguration; final HashMap commandInterpreters = new HashMap<>(); - public Interpreter(Reasoner reasoner, PrintStream out, ParserConfiguration parserConfiguration) { + public Interpreter(Reasoner reasoner, PrintWriter out, ParserConfiguration parserConfiguration) { this.reasoner = reasoner; this.out = out; this.parserConfiguration = parserConfiguration; @@ -76,7 +76,7 @@ public void runCommand(Command command) throws CommandExecutionException { throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); } } - + public Command parseCommand(String commandString) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes(StandardCharsets.UTF_8)); final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); @@ -105,12 +105,12 @@ public Command parseCommand(String commandString) throws ParsingException { public Reasoner getReasoner() { return reasoner; } - + public ParserConfiguration getParserConfiguration() { return parserConfiguration; } - public PrintStream getOut() { + public PrintWriter getOut() { return out; } From e5fc2a847208b1d295ae25d0ebff4d913d9fc031 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 12:29:56 +0200 Subject: [PATCH 0659/1003] remove unused import --- .../semanticweb/rulewerk/client/shell/RulewerkApplication.java | 1 - 1 file changed, 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java index 50b15d594..186dad027 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java @@ -21,7 +21,6 @@ */ import java.io.IOException; -import java.io.PrintStream; import org.jline.reader.LineReader; import org.jline.terminal.Terminal; From 2a347a549497fd91bafe584eaa12b3015f90270e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 12:34:18 +0200 Subject: [PATCH 0660/1003] print summary of asserts --- .../rulewerk/commands/AssertCommandInterpreter.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index 759f5a059..081c6222c 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -32,6 +32,8 @@ public class AssertCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + int factCount = 0; + int ruleCount = 0; for (Argument argument : command.getArguments()) { if (argument.fromPositiveLiteral().isPresent()) { PositiveLiteral literal = argument.fromPositiveLiteral().get(); @@ -42,13 +44,17 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); } interpreter.getReasoner().getKnowledgeBase().addStatement(fact); + factCount++; } else if (argument.fromRule().isPresent()) { 
interpreter.getReasoner().getKnowledgeBase().addStatement(argument.fromRule().get()); + ruleCount++; } else { throw new CommandExecutionException( "Only facts and rules can be asserted. Encountered " + argument.toString()); } } + + interpreter.getOut().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rules."); } @Override From e49bc0eabbcc0b654c60f6ca10c3dc6a4d9e28b0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 12:39:14 +0200 Subject: [PATCH 0661/1003] More robust exception handling in parsing --- .../main/java/org/semanticweb/rulewerk/client/shell/Shell.java | 1 + .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 2 +- .../main/java/org/semanticweb/rulewerk/parser/RuleParser.java | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 3075657ea..8c5edf214 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -49,6 +49,7 @@ public void run(final CommandReader commandReader) { command = commandReader.readCommand(); } catch (final Exception e) { interpreter.getOut().println("Unexpected error: " + e.getMessage()); + e.printStackTrace(); continue; } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 2de5ea02a..b4d70b0aa 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -96,7 +96,7 @@ public Command parseCommand(String commandString) throws ParsingException { try { result = localParser.command(); localParser.ensureEndOfInput(); - } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { + } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { throw new ParsingException("Exception while parsing command.", e); } return result; diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index 12d186684..146fa1085 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -181,7 +181,7 @@ static T parseSyntaxFragment(final String input, SyntaxFragme try { result = parserAction.parse(localParser); localParser.ensureEndOfInput(); - } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { + } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { LOGGER.error("Exception while parsing " + syntaxFragmentType + ": {}!", input); throw new ParsingException("Exception while parsing " + syntaxFragmentType, e); } From 9a2e7072bb940dbe9fa40804089264d139856957 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 12:45:09 +0200 Subject: [PATCH 0662/1003] better error reporting --- .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index b4d70b0aa..46f6493c2 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -97,7 +97,7 @@ public Command parseCommand(String commandString) throws ParsingException { result = localParser.command(); localParser.ensureEndOfInput(); } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { - throw new ParsingException("Exception while parsing command.", e); + throw new ParsingException("failed to parse command \"\"\"" + commandString + "\"\"\"", e); } return result; } From 8aff9426d7512e508b73d7e4b7db08f4c7b76a8a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 13:12:47 +0200 Subject: [PATCH 0663/1003] Remove unused import --- .../java/org/semanticweb/rulewerk/core/model/api/Argument.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java index dfbd0c771..e25136bd3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java @@ -23,8 +23,6 @@ import java.util.Optional; import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; - /** * A tagged union representing the possible types allowed to appear as arguments * in commands and parser directives. From 2a05cb664bcb86879f4750a0d1296cbfc7e7a374 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 13:45:15 +0200 Subject: [PATCH 0664/1003] write KB to Writer, not to OutputStream --- .../rulewerk/core/reasoner/KnowledgeBase.java | 33 ++-- .../implementation/QueryResultImpl.java | 151 ++++++++++++++++++ .../core/reasoner/KnowledgeBaseTest.java | 21 ++- 3 files changed, 180 insertions(+), 25 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 7cfe63c3a..5dc6e398c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -22,10 +22,12 @@ import java.io.File; import java.io.FileInputStream; -import java.io.FileOutputStream; +import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -571,27 +573,27 @@ public String unresolveAbsoluteIri(String iri) { /** * Serialise the KnowledgeBase to the {@link OutputStream}. * - * @param stream the {@link OutputStream} to serialise to. + * @param writer the {@link OutputStream} to serialise to. 
* * @throws IOException if an I/O error occurs while writing to given output * stream */ - public void writeKnowledgeBase(OutputStream stream) throws IOException { - stream.write(Serializer.getBaseAndPrefixDeclarations(this).getBytes()); + public void writeKnowledgeBase(Writer writer) throws IOException { + writer.write(Serializer.getBaseAndPrefixDeclarations(this)); for (DataSourceDeclaration dataSource : this.getDataSourceDeclarations()) { - stream.write(Serializer.getString(dataSource).getBytes()); - stream.write('\n'); + writer.write(Serializer.getString(dataSource)); + writer.write('\n'); } - for (Rule rule : this.getRules()) { - stream.write(Serializer.getString(rule).getBytes()); - stream.write('\n'); + for (Fact fact : this.getFacts()) { + writer.write(Serializer.getFactString(fact)); + writer.write('\n'); } - for (Fact fact : this.getFacts()) { - stream.write(Serializer.getFactString(fact).getBytes()); - stream.write('\n'); + for (Rule rule : this.getRules()) { + writer.write(Serializer.getString(rule)); + writer.write('\n'); } } @@ -601,10 +603,13 @@ public void writeKnowledgeBase(OutputStream stream) throws IOException { * @param filePath path to the file to serialise into. * * @throws IOException + * @deprecated Use {@link KnowledgeBase#writeKnowledgeBase(Writer)} instead. The + * method will disappear. */ + @Deprecated public void writeKnowledgeBase(String filePath) throws IOException { - try (OutputStream stream = new FileOutputStream(filePath)) { - this.writeKnowledgeBase(stream); + try (FileWriter writer = new FileWriter(filePath, StandardCharsets.UTF_8)) { + this.writeKnowledgeBase(writer); } } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index 7d8a06f24..5207b1087 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -1,5 +1,9 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; + /* * #%L * Rulewerk Core Components @@ -21,23 +25,170 @@ */ import java.util.List; +import java.util.ListIterator; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Term; /** * Implements {@link QueryResult}s. 
+ * * @author Irina Dragoste * */ public final class QueryResultImpl implements QueryResult { + static class ShallowTermList implements List { + + final Term[] data; + + public ShallowTermList(Term[] data) { + this.data = data; + } + + UnsupportedOperationException uoe() { return new UnsupportedOperationException(); } + + @Override public boolean add(Term e) { throw uoe(); } + @Override public boolean addAll(Collection c) { throw uoe(); } + @Override public void clear() { throw uoe(); } + @Override public boolean remove(Object o) { throw uoe(); } + @Override public boolean removeAll(Collection c) { throw uoe(); } + @Override public boolean retainAll(Collection c) { throw uoe(); } + @Override public void add(int index, Term element) { throw uoe(); } + @Override public boolean addAll(int index, Collection c) { throw uoe(); } + @Override public Term remove(int index) { throw uoe(); } + + @Override + public boolean contains(Object o) { + return indexOf(o) >= 0; + } + + @Override + public boolean containsAll(Collection arg0) { + // TODO Auto-generated method stub + return false; + } + + @Override + public Term get(int index) { + return data[index]; + } + + @Override + public int indexOf(Object o) { + for (int i = 0, s = size(); i < s; i++) { + if (get(i).equals(o)) { + return i; + } + } + return -1; + } + + @Override + public boolean isEmpty() { + return size() == 0; + } + + @Override + public Iterator iterator() { + // TODO Auto-generated method stub + return null; + } + + @Override + public int lastIndexOf(Object arg0) { + // TODO Auto-generated method stub + return 0; + } + + @Override + public ListIterator listIterator() { + // TODO Auto-generated method stub + return null; + } + + @Override + public ListIterator listIterator(int arg0) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Term set(int arg0, Term arg1) { + throw uoe(); + } + + @Override + public int size() { + return data.length; + } + + @Override + public List subList(int arg0, int arg1) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Object[] toArray() { + return Arrays.copyOf(data, data.length); + } + + @Override + @SuppressWarnings("unchecked") + public T[] toArray(T[] a) { + int size = data.length; + if (a.length < size) { + // Make a new array of a's runtime type, but my contents: + return (T[]) Arrays.copyOf(data, size, a.getClass()); + } + System.arraycopy(data, 0, a, 0, size); + if (a.length > size) { + a[size] = null; // null-terminate + } + return a; + } + + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + + if (!(o instanceof List)) { + return false; + } + + Iterator oit = ((List) o).iterator(); + for (int i = 0, s = size(); i < s; i++) { + if (!oit.hasNext() || !get(i).equals(oit.next())) { + return false; + } + } + return !oit.hasNext(); + } + + @Override + public int hashCode() { + int hash = 1; + for (int i = 0, s = size(); i < s; i++) { + hash = 31 * hash + get(i).hashCode(); + } + return hash; + } + } + private final List terms; public QueryResultImpl(List terms) { this.terms = terms; } + public static QueryResultImpl fromArray(Term[] terms) { + return new QueryResultImpl(new ShallowTermList(terms)); + } + @Override public List getTerms() { return this.terms; diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index d989d1a9f..91a350b65 100644 --- 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -22,9 +22,8 @@ import static org.junit.Assert.*; -import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.OutputStream; +import java.io.StringWriter; import java.net.URL; import java.util.Arrays; @@ -133,9 +132,9 @@ public void mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationExc @Test public void writeKnowledgeBase_justFacts_succeeds() throws IOException { - OutputStream stream = new ByteArrayOutputStream(); - this.kb.writeKnowledgeBase(stream); - assertEquals("P(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + StringWriter writer = new StringWriter(); + this.kb.writeKnowledgeBase(writer); + assertEquals("P(c) .\nP(d) .\nQ(c) .\n", writer.toString()); } @Test @@ -144,9 +143,9 @@ public void writeKnowledgeBase_withBase_succeeds() throws IOException { MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setBaseIri(baseIri); this.kb.mergePrefixDeclarations(prefixDeclarations); - OutputStream stream = new ByteArrayOutputStream(); - this.kb.writeKnowledgeBase(stream); - assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + StringWriter writer = new StringWriter(); + this.kb.writeKnowledgeBase(writer); + assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n", writer.toString()); } @Test @@ -157,9 +156,9 @@ public void writeKnowledgeBase_alsoRuleAndDataSource_succeeds() throws IOExcepti this.kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("S", 1), new SparqlQueryResultDataSource(new URL(sparqlIri), "?X", sparqlBgp))); - OutputStream stream = new ByteArrayOutputStream(); - this.kb.writeKnowledgeBase(stream); + StringWriter writer = new StringWriter(); + this.kb.writeKnowledgeBase(writer); assertEquals("@source S[1]: sparql(<" + sparqlIri + ">, \"?X\", \"" + sparqlBgp - + "\") .\nP(?X) :- Q(?X) .\nP(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + + "\") .\nP(c) .\nP(d) .\nQ(c) .\nP(?X) :- Q(?X) .\n", writer.toString()); } } From dc7a85284528ba13a63dfded8da7091d3023d777 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 13:54:03 +0200 Subject: [PATCH 0665/1003] improed format for command-specific help --- .../semanticweb/rulewerk/commands/HelpCommandInterpreter.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index a94b0fa30..8488b42f9 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -37,6 +37,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); if (interpreter.commandInterpreters.containsKey(helpCommand)) { + interpreter.getOut().println( + "@" + helpCommand + ": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis()); interpreter.getOut().println(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand)); } else { interpreter.getOut().println("Command 
'" + helpCommand + "' not known."); From a3b03f0d9ccf6d36e263e62d7915030803d0d5c6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 13:54:25 +0200 Subject: [PATCH 0666/1003] new command to show KB contents --- .../rulewerk/commands/Interpreter.java | 1 + .../commands/ShowKbCommandInterpreter.java | 29 +++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 46f6493c2..c057d96ef 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -122,6 +122,7 @@ private void registerDefaultCommandInterpreters() { registerCommandInterpreter("load", new LoadCommandInterpreter()); registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); registerCommandInterpreter("setsource", new SetSourceCommandInterpreter()); + registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); } /** diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java new file mode 100644 index 000000000..2efb728b9 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -0,0 +1,29 @@ +package org.semanticweb.rulewerk.commands; + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.Command; + +public class ShowKbCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 0); + try { + interpreter.getReasoner().getKnowledgeBase().writeKnowledgeBase(interpreter.getOut()); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + "."; + } + + @Override + public String getSynopsis() { + return "displays the content of the knowledge base"; + } + +} From ba989e4d9ce05999668b2d5908f4618b444202da Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:10:47 +0200 Subject: [PATCH 0667/1003] rename setsource to addsource --- ...Interpreter.java => AddSourceCommandInterpreter.java} | 9 ++++++--- .../org/semanticweb/rulewerk/commands/Interpreter.java | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) rename rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/{SetSourceCommandInterpreter.java => AddSourceCommandInterpreter.java} (89%) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java similarity index 89% rename from rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java rename to rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index 416084b38..c80c63f3f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -28,7 +28,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.parser.ParsingException; -public class SetSourceCommandInterpreter implements CommandInterpreter { +public class AddSourceCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { @@ -73,12 +73,15 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio @Override public String getHelp(String commandName) { - return "Usage: @" + commandName + " []: ."; + return "Usage: @" + commandName + " []: .\n" + + " [] : the name of the predicate and its arity\n" + + " : a fact specifying a source declaration\n\n" + + "Note that every predicate can have multiple sources."; } @Override public String getSynopsis() { - return "define an external data source for a predicate"; + return "define a new external data source for a predicate"; } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index c057d96ef..156e2d24f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -121,7 +121,7 @@ private void registerDefaultCommandInterpreters() { registerCommandInterpreter("reason", new ReasonCommandInterpreter()); registerCommandInterpreter("load", new LoadCommandInterpreter()); registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); - registerCommandInterpreter("setsource", new SetSourceCommandInterpreter()); + registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); } From ce27b691910ea9457b32559b29d2d2330e1c6ce7 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:10:53 +0200 Subject: [PATCH 0668/1003] license header --- .../commands/ShowKbCommandInterpreter.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java index 2efb728b9..86ea91d12 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.commands; +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.IOException; import org.semanticweb.rulewerk.core.model.api.Command; From c144422c1f2364ea9c19bcd10a8e70938c528101 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:11:01 +0200 Subject: [PATCH 0669/1003] more informative help --- .../semanticweb/rulewerk/commands/HelpCommandInterpreter.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index 8488b42f9..94b687195 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -33,6 +33,9 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio interpreter.getOut().println( " @" + commandName + ": " + interpreter.commandInterpreters.get(commandName).getSynopsis()); } + interpreter.getOut().println(); + interpreter.getOut() + .println("For more information on any command, use @" + command.getName() + " [command name]."); } else if (command.getArguments().size() == 1 && command.getArguments().get(0).fromTerm().isPresent() && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); From 08a0b0e1b7a4f54310714bb3890f38f113f9969c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:39:34 +0200 Subject: [PATCH 0670/1003] convenience method to access KB --- .../rulewerk/commands/AssertCommandInterpreter.java | 6 +++--- .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 5 +++++ .../rulewerk/commands/LoadCommandInterpreter.java | 2 +- .../rulewerk/commands/SetPrefixCommandInterpreter.java | 5 ++--- .../rulewerk/commands/ShowKbCommandInterpreter.java | 2 +- 5 files changed, 12 insertions(+), 8 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index 081c6222c..b8f02719b 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -43,17 +43,17 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } catch (IllegalArgumentException e) { throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); } - interpreter.getReasoner().getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(fact); factCount++; } else if (argument.fromRule().isPresent()) { - interpreter.getReasoner().getKnowledgeBase().addStatement(argument.fromRule().get()); + interpreter.getKnowledgeBase().addStatement(argument.fromRule().get()); ruleCount++; } else { throw new CommandExecutionException( "Only facts and rules can be asserted. 
Encountered " + argument.toString()); } } - + interpreter.getOut().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rules."); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 156e2d24f..0a13f02e3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -33,6 +33,7 @@ import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; @@ -106,6 +107,10 @@ public Reasoner getReasoner() { return reasoner; } + public KnowledgeBase getKnowledgeBase() { + return reasoner.getKnowledgeBase(); + } + public ParserConfiguration getParserConfiguration() { return parserConfiguration; } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index dde5e9d18..e1dda80e5 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -43,7 +43,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } try { FileInputStream fileInputStream = new FileInputStream(fileName); - RuleParser.parseInto(interpreter.getReasoner().getKnowledgeBase(), fileInputStream); + RuleParser.parseInto(interpreter.getKnowledgeBase(), fileInputStream); } catch (FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); } catch (ParsingException e) { diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java index 7b9427d02..ed31fb400 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java @@ -31,10 +31,9 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio String prefixName = Interpreter.extractStringArgument(command, 0, "prefix name"); String prefixIri = Interpreter.extractNameArgument(command, 1, "prefix IRI"); - interpreter.getReasoner().getKnowledgeBase().getPrefixDeclarationRegistry().unsetPrefix(prefixName); + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().unsetPrefix(prefixName); try { - interpreter.getReasoner().getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri(prefixName, - prefixIri); + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri(prefixName, prefixIri); } catch (PrefixDeclarationException e) { // practically impossible throw new CommandExecutionException("Setting prefix failed: " + e.getMessage()); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java index 86ea91d12..2927d4e34 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -30,7 +30,7 @@ public class ShowKbCommandInterpreter implements CommandInterpreter { public void run(Command command, Interpreter interpreter) throws CommandExecutionException { Interpreter.validateArgumentCount(command, 0); try { - interpreter.getReasoner().getKnowledgeBase().writeKnowledgeBase(interpreter.getOut()); + interpreter.getKnowledgeBase().writeKnowledgeBase(interpreter.getOut()); } catch (IOException e) { throw new CommandExecutionException(e.getMessage(), e); } From 5e480211cd61992beabbea9592f3c997addcc507 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:43:00 +0200 Subject: [PATCH 0671/1003] improved message --- .../semanticweb/rulewerk/commands/AssertCommandInterpreter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index b8f02719b..c83b2ae46 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -54,7 +54,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - interpreter.getOut().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rules."); + interpreter.getOut().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); } @Override From 952708889ae60c6df6efeefca7ee8ee2eb6d2505 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:43:26 +0200 Subject: [PATCH 0672/1003] flush message before starting to reason --- .../semanticweb/rulewerk/commands/ReasonCommandInterpreter.java | 1 + 1 file changed, 1 insertion(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index 6ecfd8944..07d600d71 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -35,6 +35,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } interpreter.getOut().println("Loading and materializing inferences ..."); + interpreter.getOut().flush(); Timer timer = new Timer("reasoning"); timer.start(); From e84695dc890b9e0a3e26c2b75e8120c447497059 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:44:39 +0200 Subject: [PATCH 0673/1003] flush goodbye before exiting --- .../main/java/org/semanticweb/rulewerk/client/shell/Shell.java | 1 + 1 file changed, 1 insertion(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 8c5edf214..4c1e9daec 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -62,6 +62,7 @@ 
public void run(final CommandReader commandReader) { } } interpreter.getOut().println("Rulewerk shell is stopped. Bye."); + interpreter.getOut().flush(); } public void exitShell() { From 5de1b7326c2585f4b6c5558169096ae1a4172938 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 15:08:15 +0200 Subject: [PATCH 0674/1003] support parsing of partial source declarations --- .../semanticweb/rulewerk/parser/javacc/JavaCCParser.jj | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index e73b4b222..3dbff82a7 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -39,6 +39,7 @@ import java.net.URISyntaxException; import java.util.List; import java.util.ArrayList; +import java.util.Arrays; import java.util.LinkedList; import java.util.ArrayDeque; import java.util.Deque; @@ -166,10 +167,14 @@ Command command() throws PrefixDeclarationException : { } { name = < CUSTOM_DIRECTIVE > - ( LOOKAHEAD(predicateName() < ARITY >) predicateName = predicateName() arity = < ARITY > < COLON > arguments = Arguments() < DOT > { + ( LOOKAHEAD(predicateName() < ARITY > ) predicateName = predicateName() arity = < ARITY > < DOT > { + Argument argument = Argument.term(Expressions.makeDatatypeConstant(predicateName + "[" + arity.image + "]:", PrefixDeclarationRegistry.XSD_STRING)); + return new Command(name.image, Arrays.asList(argument)); + } + | LOOKAHEAD(predicateName() < ARITY >) predicateName = predicateName() arity = < ARITY > < COLON > arguments = Arguments() < DOT > { arguments.addFirst(Argument.term(Expressions.makeDatatypeConstant(predicateName + "[" + arity.image + "]:", PrefixDeclarationRegistry.XSD_STRING))); return new Command(name.image,arguments); - } + } | arguments = Arguments() < DOT > { return new Command(name.image,arguments); } | pn = < PNAME_NS > arguments = Arguments() < DOT > { arguments.addFirst(Argument.term(Expressions.makeDatatypeConstant(pn.image, PrefixDeclarationRegistry.XSD_STRING))); From 4d690941413795f362e73e3fcb5992b7a619f559 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 15:10:23 +0200 Subject: [PATCH 0675/1003] support removal of sources --- .../commands/AddSourceCommandInterpreter.java | 59 +++++++------ .../rulewerk/commands/Interpreter.java | 1 + .../RemoveSourceCommandInterpreter.java | 82 +++++++++++++++++++ 3 files changed, 116 insertions(+), 26 deletions(-) create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index c80c63f3f..f0182e3dd 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -37,6 +37,34 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, "source declaration"); + Predicate predicate = extractPredicate(predicateDeclaration); + DataSource dataSource 
= extractDataSource(sourceDeclaration, interpreter); + + if (dataSource.getRequiredArity().isPresent()) { + Integer requiredArity = dataSource.getRequiredArity().get(); + if (predicate.getArity() != requiredArity) { + throw new CommandExecutionException("Invalid arity " + predicate.getArity() + " for data source, " + + "expected " + requiredArity + "."); + } + } + + interpreter.getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " []: .\n" + + " [] : the name of the predicate and its arity\n" + + " : a fact specifying a source declaration\n\n" + + "Note that every predicate can have multiple sources."; + } + + @Override + public String getSynopsis() { + return "define a new external data source for a predicate"; + } + + static Predicate extractPredicate(String predicateDeclaration) throws CommandExecutionException { String predicateName; int arity; try { @@ -50,38 +78,17 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio "Predicate declaration must have the format \"predicateName[number]\" but was " + predicateDeclaration); } - Predicate predicate = Expressions.makePredicate(predicateName, arity); + return Expressions.makePredicate(predicateName, arity); + } - DataSource dataSource; + static DataSource extractDataSource(PositiveLiteral sourceDeclaration, Interpreter interpreter) + throws CommandExecutionException { try { - dataSource = interpreter.getParserConfiguration() + return interpreter.getParserConfiguration() .parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration); } catch (ParsingException e) { throw new CommandExecutionException("Could not parse source declaration: " + e.getMessage()); } - - if (dataSource.getRequiredArity().isPresent()) { - Integer requiredArity = dataSource.getRequiredArity().get(); - if (arity != requiredArity) { - throw new CommandExecutionException( - "Invalid arity " + arity + " for data source, " + "expected " + requiredArity + "."); - } - } - - interpreter.getReasoner().getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); - } - - @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " []: .\n" - + " [] : the name of the predicate and its arity\n" - + " : a fact specifying a source declaration\n\n" - + "Note that every predicate can have multiple sources."; - } - - @Override - public String getSynopsis() { - return "define a new external data source for a predicate"; - } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 0a13f02e3..99ff2dac3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -127,6 +127,7 @@ private void registerDefaultCommandInterpreters() { registerCommandInterpreter("load", new LoadCommandInterpreter()); registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); + registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java new file mode 100644 index 000000000..1db1b96ff --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -0,0 +1,82 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; + +public class RemoveSourceCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + if (command.getArguments().size() == 0 || command.getArguments().size() > 2) { + throw new CommandExecutionException("This command requires one or two arguments."); + } + + String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); + DataSource dataSource = null; + if (command.getArguments().size() == 2) { + PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, + "source declaration"); + dataSource = AddSourceCommandInterpreter.extractDataSource(sourceDeclaration, interpreter); + } + + if (dataSource != null) { + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + if (interpreter.getKnowledgeBase().getStatements().contains(dataSourceDeclaration)) { + interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); + interpreter.getOut().println("Removed specified data source declaration."); + } else { + interpreter.getOut().println("Specified data source declaration not found in knowledge base."); + } + } else { + int count = 0; + for (DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() + .getDataSourceDeclarations()) { + if (dataSourceDeclaration.getPredicate().equals(predicate)) { + interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); + count++; + } + } + interpreter.getOut().println("Removed " + count + " matching data source declaration(s)."); + } + + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " []: .\n" + + " [] : the name of the predicate and its arity\n" + + " (optional): a fact specifying a source declaration\n\n" + + "Note that every predicate can have multiple sources."; + } + + @Override + public String getSynopsis() { + return "remove one or all external data sources for a 
predicate"; + } + +} From 05a427857d307bcebd8460a85f098130937f1c67 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 15:34:27 +0200 Subject: [PATCH 0676/1003] let KnowledgeBase report on removed statements --- .../RemoveSourceCommandInterpreter.java | 3 +-- .../rulewerk/core/reasoner/KnowledgeBase.java | 17 +++++++++++++---- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java index 1db1b96ff..8577d9e7a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -46,8 +46,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (dataSource != null) { DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); - if (interpreter.getKnowledgeBase().getStatements().contains(dataSourceDeclaration)) { - interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); + if (interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration) > 0) { interpreter.getOut().println("Removed specified data source declaration."); } else { interpreter.getOut().println("Specified data source declaration not found in knowledge base."); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 5dc6e398c..40cb247ef 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -273,13 +273,18 @@ public void addStatements(final Statement... statements) { } /** - * Removes a single statement from the knowledge base. + * Removes a single statement from the knowledge base, and returns the number of + * statements that were actually removed (0 or 1). * * @param statement the statement to remove + * @return number of removed statements */ - public void removeStatement(final Statement statement) { + public int removeStatement(final Statement statement) { if (this.doRemoveStatement(statement)) { this.notifyListenersOnStatementRemoved(statement); + return 1; + } else { + return 0; } } @@ -303,8 +308,9 @@ boolean doRemoveStatement(final Statement statement) { * Removes a collection of statements to the knowledge base. * * @param statements the statements to remove + * @return number of removed statements */ - public void removeStatements(final Collection statements) { + public int removeStatements(final Collection statements) { final List removedStatements = new ArrayList<>(); for (final Statement statement : statements) { @@ -314,14 +320,16 @@ public void removeStatements(final Collection statements) { } this.notifyListenersOnStatementsRemoved(removedStatements); + return removedStatements.size(); } /** * Removes a list of statements from the knowledge base. * * @param statements the statements to remove + * @return number of removed statements */ - public void removeStatements(final Statement... statements) { + public int removeStatements(final Statement... 
statements) { final List removedStatements = new ArrayList<>(); for (final Statement statement : statements) { @@ -331,6 +339,7 @@ public void removeStatements(final Statement... statements) { } this.notifyListenersOnStatementsRemoved(removedStatements); + return removedStatements.size(); } private void notifyListenersOnStatementAdded(final Statement addedStatement) { From b40db91150b41be152fc5e2c3b353eb48692e8ee Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 15:34:58 +0200 Subject: [PATCH 0677/1003] support retraction of statements --- .../rulewerk/commands/Interpreter.java | 9 +-- .../commands/RetractCommandInterpreter.java | 68 +++++++++++++++++++ 2 files changed, 73 insertions(+), 4 deletions(-) create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 99ff2dac3..1dd005700 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -121,13 +121,14 @@ public PrintWriter getOut() { private void registerDefaultCommandInterpreters() { registerCommandInterpreter("help", new HelpCommandInterpreter()); - registerCommandInterpreter("assert", new AssertCommandInterpreter()); - registerCommandInterpreter("query", new QueryCommandInterpreter()); - registerCommandInterpreter("reason", new ReasonCommandInterpreter()); registerCommandInterpreter("load", new LoadCommandInterpreter()); - registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); + registerCommandInterpreter("assert", new AssertCommandInterpreter()); + registerCommandInterpreter("retract", new RetractCommandInterpreter()); registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); + registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); + registerCommandInterpreter("reason", new ReasonCommandInterpreter()); + registerCommandInterpreter("query", new QueryCommandInterpreter()); registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java new file mode 100644 index 000000000..88ef8b6a2 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -0,0 +1,68 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class RetractCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + int factCount = 0; + int ruleCount = 0; + for (Argument argument : command.getArguments()) { + if (argument.fromPositiveLiteral().isPresent()) { + PositiveLiteral literal = argument.fromPositiveLiteral().get(); + Fact fact; + try { + fact = Expressions.makeFact(literal.getPredicate(), literal.getArguments()); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); + } + factCount += interpreter.getKnowledgeBase().removeStatement(fact); + } else if (argument.fromRule().isPresent()) { + ruleCount += interpreter.getKnowledgeBase().removeStatement(argument.fromRule().get()); + } else { + throw new CommandExecutionException( + "Only facts and rules can be retracted. Encountered " + argument.toString()); + } + } + + interpreter.getOut().println("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " ()+ .\n" + + " fact or rule: statement(s) to be removed from the knowledge base\n" + + "Reasoning needs to be invoked after finishing the removal of statements."; + } + + @Override + public String getSynopsis() { + return "remove facts and rules from the knowledge base"; + } + +} From d01c6edd60af49185083536b848266f5f0bdb6c7 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 15:35:09 +0200 Subject: [PATCH 0678/1003] formatting --- .../semanticweb/rulewerk/commands/AssertCommandInterpreter.java | 1 - 1 file changed, 1 deletion(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index c83b2ae46..ce2df6346 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -31,7 +31,6 @@ public class AssertCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - int factCount = 0; int ruleCount = 0; for (Argument argument : command.getArguments()) { From 0efce81f3f1cbacd05b903fc415461be29f1c31c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 15:51:17 +0200 Subject: [PATCH 0679/1003] added dummy Main class for main command with shell and materialize subcommands --- rulewerk-client/pom.xml | 2 +- .../rulewerk/client/picocli/Main.java | 62 ++++++++++++++++ .../client/shell/InteractiveShell.java | 72 +++++++++++++++++++ .../rulewerk/client/shell/Shell.java | 20 ++++++ 4 files changed, 155 insertions(+), 1 deletion(-) create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java diff --git a/rulewerk-client/pom.xml index
98bca4d59..760f4d472 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -94,7 +94,7 @@ - org.semanticweb.rulewerk.client.picocli.RulewerkClient + org.semanticweb.rulewerk.client.picocli.Main diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java new file mode 100644 index 000000000..3a2ac85bc --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -0,0 +1,62 @@ +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.client.shell.InteractiveShell; + +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Dummy class with main method that is a command with subcommands shell and + * materialize + * + * @author Irina Dragoste + * + */ +@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShell.class, + RulewerkClientMaterialize.class }) +public class Main { + + public static void main(final String[] args) throws IOException { + if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { + final InteractiveShell interactiveShell = new InteractiveShell(); + interactiveShell.run(); + } else { + if (args[0].equals("materialize")) { + final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); + commandline.execute(args); + } else { + if (!args[0].equals("help")) { + System.out.println("Invalid command."); + } + // TODO improve help + // TODO do we need to create a Help command? + (new CommandLine(new Main())).usage(System.out); + + } + } + + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java new file mode 100644 index 000000000..c39f0bb1b --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -0,0 +1,72 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; +import java.io.PrintStream; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +import picocli.CommandLine.Command; + +@Command(name = "shell", description = "An interactive shell for Rulewerk. The default command.") +public class InteractiveShell +//implements Runnable +{ + +// @Override + public void run() throws IOException { + + final Terminal terminal = DefaultConfiguration.buildTerminal(); + final Interpreter interpreter = initializeInterpreter(terminal); + + final Shell shell = new Shell(interpreter); + + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); + final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); + final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); + + shell.run(commandReader); + + } + + static Interpreter initializeInterpreter(final Terminal terminal) { + // FIXME connect terminal writer +// final PrintStream out = terminal.writer().; + final PrintStream out = System.out; + + // TODO reasoner initial KB from args + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = new VLogReasoner(knowledgeBase); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); + + return interpreter; + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 2555f3d50..3075657ea 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; import org.semanticweb.rulewerk.commands.CommandExecutionException; From a6149e6d3aec7badc88fd64f31500a33d2584e51 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 15:58:13 +0200 Subject: [PATCH 0680/1003] fix compile error --- .../client/shell/InteractiveShell.java | 138 +++++++++--------- 1 file changed, 66 insertions(+), 72 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index c39f0bb1b..154861f30 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -1,72 +1,66 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; -import java.io.PrintStream; - -import org.jline.reader.LineReader; -import org.jline.terminal.Terminal; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; - -import picocli.CommandLine.Command; - -@Command(name = "shell", description = "An interactive shell for Rulewerk. 
The default command.") -public class InteractiveShell -//implements Runnable -{ - -// @Override - public void run() throws IOException { - - final Terminal terminal = DefaultConfiguration.buildTerminal(); - final Interpreter interpreter = initializeInterpreter(terminal); - - final Shell shell = new Shell(interpreter); - - final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); - final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); - final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); - - shell.run(commandReader); - - } - - static Interpreter initializeInterpreter(final Terminal terminal) { - // FIXME connect terminal writer -// final PrintStream out = terminal.writer().; - final PrintStream out = System.out; - - // TODO reasoner initial KB from args - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - final Reasoner reasoner = new VLogReasoner(knowledgeBase); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); - - return interpreter; - } - -} +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +import picocli.CommandLine.Command; + +@Command(name = "shell", description = "An interactive shell for Rulewerk. 
The default command.") +public class InteractiveShell +//implements Runnable +{ + +// @Override + public void run() throws IOException { + + final Terminal terminal = DefaultConfiguration.buildTerminal(); + final Interpreter interpreter = initializeInterpreter(terminal); + + final Shell shell = new Shell(interpreter); + + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); + final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); + final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); + + shell.run(commandReader); + + } + + static Interpreter initializeInterpreter(final Terminal terminal) { + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = new VLogReasoner(knowledgeBase); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Interpreter interpreter = new Interpreter(reasoner, terminal.writer(), parserConfiguration); + + return interpreter; + } + +} From 8f6a3b9ec5dda72c308cbfe683a3533fd341993c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 16:20:05 +0200 Subject: [PATCH 0681/1003] avoid Java 11 feature --- .../semanticweb/rulewerk/core/reasoner/KnowledgeBase.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 40cb247ef..b9f1d3ee9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -22,10 +22,11 @@ import java.io.File; import java.io.FileInputStream; -import java.io.FileWriter; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -617,7 +618,7 @@ public void writeKnowledgeBase(Writer writer) throws IOException { */ @Deprecated public void writeKnowledgeBase(String filePath) throws IOException { - try (FileWriter writer = new FileWriter(filePath, StandardCharsets.UTF_8)) { + try (Writer writer = new OutputStreamWriter(new FileOutputStream(filePath), StandardCharsets.UTF_8)) { this.writeKnowledgeBase(writer); } } From 10034af3e3d4dd62a2a9523c57bb5256c44982f6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 16:42:59 +0200 Subject: [PATCH 0682/1003] improve @help format --- .../rulewerk/commands/HelpCommandInterpreter.java | 12 +++++++++--- .../semanticweb/rulewerk/commands/Interpreter.java | 4 ++-- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index 94b687195..33309e837 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -28,11 +28,17 @@ public class HelpCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { if (command.getArguments().size() == 0) { - interpreter.getOut().println("Available commands:"); + 
int maxLength = 0; for (String commandName : interpreter.commandInterpreters.keySet()) { - interpreter.getOut().println( - " @" + commandName + ": " + interpreter.commandInterpreters.get(commandName).getSynopsis()); + maxLength = (commandName.length() > maxLength) ? commandName.length() : maxLength; } + final int padLength = maxLength + 1; + + interpreter.getOut().println("Available commands:"); + interpreter.commandInterpreters.forEach((commandName, commandForName) -> { + interpreter.getOut().println(" @" + String.format("%1$-" + padLength + "s", commandName) + ": " + + commandForName.getSynopsis()); + }); interpreter.getOut().println(); interpreter.getOut() .println("For more information on any command, use @" + command.getName() + " [command name]."); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 1dd005700..402ad8e29 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -25,7 +25,7 @@ import java.io.PrintWriter; import java.nio.charset.StandardCharsets; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map.Entry; @@ -47,7 +47,7 @@ public class Interpreter { final PrintWriter out; final ParserConfiguration parserConfiguration; - final HashMap commandInterpreters = new HashMap<>(); + final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); public Interpreter(Reasoner reasoner, PrintWriter out, ParserConfiguration parserConfiguration) { this.reasoner = reasoner; From bb1e429d6140afc3c191591d422ee272105568ae Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 16:43:09 +0200 Subject: [PATCH 0683/1003] show welcome message --- .../java/org/semanticweb/rulewerk/client/shell/Shell.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 4c1e9daec..d817e21ee 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -42,6 +42,8 @@ public Shell(final Interpreter interpreter) { } public void run(final CommandReader commandReader) { + printWelcome(); + running = true; while (running) { final Command command; @@ -68,6 +70,11 @@ public void run(final CommandReader commandReader) { public void exitShell() { this.running = false; } + + private void printWelcome() { + interpreter.getOut().println("Welcome to the Rulewerk interactive shell."); + interpreter.getOut().println("For further information, type @help."); + } // @Override // public void handleResult(final Object result) { From 63862dcd34c5b89109a3eb5d634ffac9804e4fca Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 17:02:16 +0200 Subject: [PATCH 0684/1003] remove deprecated class RulewerkApplication. 
InteractiveShell can be sued --- .../rulewerk/client/picocli/Main.java | 123 +++++++++--------- .../client/shell/InteractiveShell.java | 6 +- .../client/shell/RulewerkApplication.java | 62 --------- 3 files changed, 66 insertions(+), 125 deletions(-) delete mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index 3a2ac85bc..e75ef7eb0 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -1,62 +1,61 @@ -package org.semanticweb.rulewerk.client.picocli; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; - -import org.semanticweb.rulewerk.client.shell.InteractiveShell; - -import picocli.CommandLine; -import picocli.CommandLine.Command; - -/** - * Dummy class with main method that is a command with subcommands shell and - * materialize - * - * @author Irina Dragoste - * - */ -@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShell.class, - RulewerkClientMaterialize.class }) -public class Main { - - public static void main(final String[] args) throws IOException { - if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { - final InteractiveShell interactiveShell = new InteractiveShell(); - interactiveShell.run(); - } else { - if (args[0].equals("materialize")) { - final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); - commandline.execute(args); - } else { - if (!args[0].equals("help")) { - System.out.println("Invalid command."); - } - // TODO improve help - // TODO do we need to create a Help command? - (new CommandLine(new Main())).usage(System.out); - - } - } - - } - -} +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.client.shell.InteractiveShell; + +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Dummy class with main method that is a command with subcommands shell and + * materialize + * + * @author Irina Dragoste + * + */ +@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShell.class, + RulewerkClientMaterialize.class }) +public class Main { + + public static void main(final String[] args) throws IOException { + if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { + InteractiveShell.run(); + } else { + if (args[0].equals("materialize")) { + final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); + commandline.execute(args); + } else { + if (!args[0].equals("help")) { + System.out.println("Invalid command."); + } + // TODO improve help + // TODO do we need to create a Help command? + (new CommandLine(new Main())).usage(System.out); + + } + } + + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 154861f30..63563a837 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -38,8 +38,12 @@ public class InteractiveShell //implements Runnable { + public static void main(final String[] args) throws IOException { + run(); + } + // @Override - public void run() throws IOException { + public static void run() throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); final Interpreter interpreter = initializeInterpreter(terminal); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java deleted file mode 100644 index 186dad027..000000000 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.IOException; - -import org.jline.reader.LineReader; -import org.jline.terminal.Terminal; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; - -public class RulewerkApplication { - - public static void main(final String[] args) throws IOException { - final Terminal terminal = DefaultConfiguration.buildTerminal(); - final Interpreter interpreter = initializeInterpreter(terminal); - - final Shell shell = new Shell(interpreter); - - final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); - final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); - final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); - - shell.run(commandReader); - } - - private static Interpreter initializeInterpreter(Terminal terminal) { - // FIXME connect terminal writer -// final PrintStream out = terminal.writer().; - - // TODO reasoner initial KB from args - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - final Reasoner reasoner = new VLogReasoner(knowledgeBase); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, terminal.writer(), parserConfiguration); - - return interpreter; - } - -} From e99165907652e6d813efe8f459217fa4db6867a9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 17:08:48 +0200 Subject: [PATCH 0685/1003] removed unused dependency --- rulewerk-client/pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index 760f4d472..f05da9f94 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -68,11 +68,11 @@ jansi ${jansi.version} - + From bc9be81af4350a77e3babc17537acd356cab14c9 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 17:45:10 +0200 Subject: [PATCH 0686/1003] support pretty printing --- .../client/shell/InteractiveShell.java | 3 +- .../client/shell/RulewerkApplication.java | 2 +- .../rulewerk/client/shell/Shell.java | 22 +++--- .../client/shell/TerminalStyledPrinter.java | 72 +++++++++++++++++++ .../commands/ExitCommandInterpreter.java | 2 +- .../commands/AssertCommandInterpreter.java | 2 +- .../commands/HelpCommandInterpreter.java | 21 +++--- .../rulewerk/commands/Interpreter.java | 32 +++++++-- .../commands/QueryCommandInterpreter.java | 4 +- .../commands/ReasonCommandInterpreter.java | 6 +- .../RemoveSourceCommandInterpreter.java | 6 +- .../commands/RetractCommandInterpreter.java | 2 +- .../commands/ShowKbCommandInterpreter.java | 2 +- .../commands/SimpleStyledPrinter.java | 70 ++++++++++++++++++ .../rulewerk/commands/StyledPrinter.java | 39 ++++++++++ 15 files changed, 246 insertions(+), 39 deletions(-) create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java 
b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index c39f0bb1b..283be81e8 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -58,7 +58,8 @@ static Interpreter initializeInterpreter(final Terminal terminal) { final KnowledgeBase knowledgeBase = new KnowledgeBase(); final Reasoner reasoner = new VLogReasoner(knowledgeBase); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, terminal.writer(), parserConfiguration); + final Interpreter interpreter = new Interpreter(reasoner, new TerminalStyledPrinter(terminal), + parserConfiguration); return interpreter; } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java index 186dad027..5df9d5743 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java @@ -54,7 +54,7 @@ private static Interpreter initializeInterpreter(Terminal terminal) { final KnowledgeBase knowledgeBase = new KnowledgeBase(); final Reasoner reasoner = new VLogReasoner(knowledgeBase); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, terminal.writer(), parserConfiguration); + final Interpreter interpreter = new Interpreter(reasoner, new TerminalStyledPrinter(terminal), parserConfiguration); return interpreter; } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index d817e21ee..9a2cd08d0 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -30,6 +30,7 @@ public class Shell { private final Interpreter interpreter; + boolean running; public Shell(final Interpreter interpreter) { @@ -43,14 +44,14 @@ public Shell(final Interpreter interpreter) { public void run(final CommandReader commandReader) { printWelcome(); - + running = true; while (running) { final Command command; try { command = commandReader.readCommand(); } catch (final Exception e) { - interpreter.getOut().println("Unexpected error: " + e.getMessage()); + interpreter.getWriter().println("Unexpected error: " + e.getMessage()); e.printStackTrace(); continue; } @@ -59,21 +60,26 @@ public void run(final CommandReader commandReader) { try { this.interpreter.runCommand(command); } catch (final CommandExecutionException e) { - interpreter.getOut().println("Error: " + e.getMessage()); + interpreter.getWriter().println("Error: " + e.getMessage()); } } } - interpreter.getOut().println("Rulewerk shell is stopped. Bye."); - interpreter.getOut().flush(); + interpreter.printSection("Exiting Rulewerk shell ... 
bye.\n\n"); + interpreter.getWriter().flush(); } public void exitShell() { this.running = false; } - + private void printWelcome() { - interpreter.getOut().println("Welcome to the Rulewerk interactive shell."); - interpreter.getOut().println("For further information, type @help."); + interpreter.printNormal("\n"); + interpreter.printSection("Welcome to the Rulewerk interactive shell.\n"); + interpreter.printNormal("For further information, type "); + interpreter.printCode("@help."); + interpreter.printNormal(" To quit, type "); + interpreter.printCode("@exit.\n"); + interpreter.printNormal("\n"); } // @Override diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java new file mode 100644 index 000000000..8e77422f5 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java @@ -0,0 +1,72 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.PrintWriter; + +import org.jline.terminal.Terminal; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; +import org.semanticweb.rulewerk.commands.StyledPrinter; + +public class TerminalStyledPrinter implements StyledPrinter { + + final Terminal terminal; + + public TerminalStyledPrinter(final Terminal terminal) { + this.terminal = terminal; + } + + @Override + public void printNormal(String string) { + printStyled(string, AttributedStyle.DEFAULT); + } + + @Override + public void printSection(String string) { + printStyled(string, AttributedStyle.DEFAULT.bold()); + } + + @Override + public void printEmph(String string) { + printStyled(string, AttributedStyle.DEFAULT.bold()); + } + + @Override + public void printCode(String string) { + printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW).bold()); + } + + @Override + public void printImportant(String string) { + printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.RED)); + } + + @Override + public PrintWriter getWriter() { + return terminal.writer(); + } + + private void printStyled(String string, AttributedStyle attributedStyle) { + AttributedString attributedString = new AttributedString(string, attributedStyle); + getWriter().print(attributedString.toAnsi(terminal)); + } +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index 1b7dc4d4c..5c2ac4c83 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -33,7 
+33,7 @@ public class ExitCommandInterpreter implements CommandInterpreter { public static enum ExitCommandName { - exit, quit; + exit; public static boolean isExitCommand(final String commandName) { for(final ExitCommandName name: values()) { diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index ce2df6346..99e1c90e7 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -53,7 +53,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - interpreter.getOut().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); + interpreter.getWriter().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index 33309e837..a06abf369 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -34,26 +34,25 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } final int padLength = maxLength + 1; - interpreter.getOut().println("Available commands:"); + interpreter.printSection("Available commands:\n"); interpreter.commandInterpreters.forEach((commandName, commandForName) -> { - interpreter.getOut().println(" @" + String.format("%1$-" + padLength + "s", commandName) + ": " - + commandForName.getSynopsis()); + interpreter.printCode(" @" + String.format("%1$-" + padLength + "s", commandName)); + interpreter.printNormal(": " + commandForName.getSynopsis() + "\n"); }); - interpreter.getOut().println(); - interpreter.getOut() - .println("For more information on any command, use @" + command.getName() + " [command name]."); + interpreter.printNormal("\nFor more information on any command, use "); + interpreter.printCode("@" + command.getName() + " [command name].\n"); } else if (command.getArguments().size() == 1 && command.getArguments().get(0).fromTerm().isPresent() && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); if (interpreter.commandInterpreters.containsKey(helpCommand)) { - interpreter.getOut().println( - "@" + helpCommand + ": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis()); - interpreter.getOut().println(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand)); + interpreter.printCode("@" + helpCommand); + interpreter.printNormal(": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis()); + interpreter.printNormal(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand)); } else { - interpreter.getOut().println("Command '" + helpCommand + "' not known."); + interpreter.printNormal("Command '" + helpCommand + "' not known."); } } else { - interpreter.getOut().println(getHelp(command.getName())); + interpreter.printNormal(getHelp(command.getName())); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 402ad8e29..56843bad5 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -2,6 +2,7 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; +import java.io.PrintWriter; /*- * #%L @@ -23,7 +24,6 @@ * #L% */ -import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.util.LinkedHashMap; import java.util.List; @@ -44,14 +44,14 @@ public class Interpreter { final Reasoner reasoner; - final PrintWriter out; + final StyledPrinter printer; final ParserConfiguration parserConfiguration; final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); - public Interpreter(Reasoner reasoner, PrintWriter out, ParserConfiguration parserConfiguration) { + public Interpreter(Reasoner reasoner, StyledPrinter printer, ParserConfiguration parserConfiguration) { this.reasoner = reasoner; - this.out = out; + this.printer = printer; this.parserConfiguration = parserConfiguration; registerDefaultCommandInterpreters(); } @@ -115,8 +115,28 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } - public PrintWriter getOut() { - return out; + public PrintWriter getWriter() { + return printer.getWriter(); + } + + public void printNormal(String string) { + printer.printNormal(string); + } + + public void printSection(String string) { + printer.printSection(string); + } + + public void printEmph(String string) { + printer.printEmph(string); + } + + public void printCode(String string) { + printer.printCode(string); + } + + public void printImportant(String string) { + printer.printImportant(string); } private void registerDefaultCommandInterpreters() { diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index bc8f69056..5bead8d3f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -64,11 +64,11 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(literal, true)) { int count = 0; while (count != limit && answers.hasNext()) { - interpreter.getOut().println(" " + answers.next()); + interpreter.getWriter().println(" " + answers.next()); count++; } timer.stop(); - interpreter.getOut().println(count + " result(s) in " + timer.getTotalCpuTime() / 1000000 + interpreter.getWriter().println(count + " result(s) in " + timer.getTotalCpuTime() / 1000000 + "ms. 
Results are " + answers.getCorrectness() + "."); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index 07d600d71..fe0b9f580 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -34,8 +34,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException("This command supports no arguments."); } - interpreter.getOut().println("Loading and materializing inferences ..."); - interpreter.getOut().flush(); + interpreter.getWriter().println("Loading and materializing inferences ..."); + interpreter.getWriter().flush(); Timer timer = new Timer("reasoning"); timer.start(); @@ -45,7 +45,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException(e.getMessage(), e); } timer.stop(); - interpreter.getOut().println("... finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.getWriter().println("... finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java index 8577d9e7a..8da1513a8 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -47,9 +47,9 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (dataSource != null) { DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); if (interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration) > 0) { - interpreter.getOut().println("Removed specified data source declaration."); + interpreter.getWriter().println("Removed specified data source declaration."); } else { - interpreter.getOut().println("Specified data source declaration not found in knowledge base."); + interpreter.getWriter().println("Specified data source declaration not found in knowledge base."); } } else { int count = 0; @@ -60,7 +60,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio count++; } } - interpreter.getOut().println("Removed " + count + " matching data source declaration(s)."); + interpreter.getWriter().println("Removed " + count + " matching data source declaration(s)."); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java index 88ef8b6a2..5680ae27b 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -50,7 +50,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - interpreter.getOut().println("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); + interpreter.getWriter().println("Retracted " + factCount + " fact(s) and " + ruleCount + " 
rule(s)."); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java index 2927d4e34..b854e0b4d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -30,7 +30,7 @@ public class ShowKbCommandInterpreter implements CommandInterpreter { public void run(Command command, Interpreter interpreter) throws CommandExecutionException { Interpreter.validateArgumentCount(command, 0); try { - interpreter.getKnowledgeBase().writeKnowledgeBase(interpreter.getOut()); + interpreter.getKnowledgeBase().writeKnowledgeBase(interpreter.getWriter()); } catch (IOException e) { throw new CommandExecutionException(e.getMessage(), e); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java new file mode 100644 index 000000000..3c91a2218 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java @@ -0,0 +1,70 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.PrintWriter; + +/** + * Simple implementation of {@link StyledPrinter} based on an arbitrary + * PrintWriter without any styling. 
+ * + * @author Markus Kroetzsch + * + */ +public class SimpleStyledPrinter implements StyledPrinter { + + final PrintWriter printWriter; + + public SimpleStyledPrinter(final PrintWriter printWriter) { + this.printWriter = printWriter; + } + + @Override + public void printNormal(String string) { + printWriter.print(string); + } + + @Override + public void printSection(String string) { + printWriter.print(string); + } + + @Override + public void printEmph(String string) { + printWriter.print(string); + } + + @Override + public void printCode(String string) { + printWriter.print(string); + } + + @Override + public void printImportant(String string) { + printWriter.print(string); + } + + @Override + public PrintWriter getWriter() { + return printWriter; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java new file mode 100644 index 000000000..a3d73df78 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java @@ -0,0 +1,39 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.PrintWriter; + +public interface StyledPrinter { + + void printNormal(String string); + + void printSection(String string); + + void printEmph(String string); + + void printCode(String string); + + void printImportant(String string); + + PrintWriter getWriter(); + +} From a0b3a7d2ca7d6c9bfbc6b7fa5db9c6a2e65eda16 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 17:57:48 +0200 Subject: [PATCH 0687/1003] fix formatting for command help --- .../rulewerk/commands/HelpCommandInterpreter.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index a06abf369..292a50b2a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -46,10 +46,10 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); if (interpreter.commandInterpreters.containsKey(helpCommand)) { interpreter.printCode("@" + helpCommand); - interpreter.printNormal(": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis()); - interpreter.printNormal(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand)); + interpreter.printNormal(": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis() + "\n"); + interpreter.printNormal(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand) + "\n"); } else { - interpreter.printNormal("Command '" + helpCommand + "' not known."); + interpreter.printNormal("Command '" + helpCommand + "' not known.\n"); } } else { interpreter.printNormal(getHelp(command.getName())); From eaa98c8648014c8ae00d5f5e8823d00e2eed2014 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 18:16:47 +0200 Subject: [PATCH 0688/1003] remove accidentally committed changes --- .../implementation/QueryResultImpl.java | 150 ------------------ 1 file changed, 150 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index 5207b1087..57231434a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -1,9 +1,5 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; -import java.util.Arrays; -import java.util.Collection; -import java.util.Iterator; - /* * #%L * Rulewerk Core Components @@ -25,7 +21,6 @@ */ import java.util.List; -import java.util.ListIterator; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Term; @@ -38,157 +33,12 @@ */ public final class QueryResultImpl implements QueryResult { - static class ShallowTermList implements List { - - final Term[] data; - - public ShallowTermList(Term[] data) { - this.data = data; - } - - UnsupportedOperationException uoe() { return new UnsupportedOperationException(); } - - @Override public boolean add(Term e) { throw uoe(); } - @Override public boolean addAll(Collection c) { throw uoe(); } - @Override public 
void clear() { throw uoe(); } - @Override public boolean remove(Object o) { throw uoe(); } - @Override public boolean removeAll(Collection c) { throw uoe(); } - @Override public boolean retainAll(Collection c) { throw uoe(); } - @Override public void add(int index, Term element) { throw uoe(); } - @Override public boolean addAll(int index, Collection c) { throw uoe(); } - @Override public Term remove(int index) { throw uoe(); } - - @Override - public boolean contains(Object o) { - return indexOf(o) >= 0; - } - - @Override - public boolean containsAll(Collection arg0) { - // TODO Auto-generated method stub - return false; - } - - @Override - public Term get(int index) { - return data[index]; - } - - @Override - public int indexOf(Object o) { - for (int i = 0, s = size(); i < s; i++) { - if (get(i).equals(o)) { - return i; - } - } - return -1; - } - - @Override - public boolean isEmpty() { - return size() == 0; - } - - @Override - public Iterator iterator() { - // TODO Auto-generated method stub - return null; - } - - @Override - public int lastIndexOf(Object arg0) { - // TODO Auto-generated method stub - return 0; - } - - @Override - public ListIterator listIterator() { - // TODO Auto-generated method stub - return null; - } - - @Override - public ListIterator listIterator(int arg0) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Term set(int arg0, Term arg1) { - throw uoe(); - } - - @Override - public int size() { - return data.length; - } - - @Override - public List subList(int arg0, int arg1) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Object[] toArray() { - return Arrays.copyOf(data, data.length); - } - - @Override - @SuppressWarnings("unchecked") - public T[] toArray(T[] a) { - int size = data.length; - if (a.length < size) { - // Make a new array of a's runtime type, but my contents: - return (T[]) Arrays.copyOf(data, size, a.getClass()); - } - System.arraycopy(data, 0, a, 0, size); - if (a.length > size) { - a[size] = null; // null-terminate - } - return a; - } - - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - - if (!(o instanceof List)) { - return false; - } - - Iterator oit = ((List) o).iterator(); - for (int i = 0, s = size(); i < s; i++) { - if (!oit.hasNext() || !get(i).equals(oit.next())) { - return false; - } - } - return !oit.hasNext(); - } - - @Override - public int hashCode() { - int hash = 1; - for (int i = 0, s = size(); i < s; i++) { - hash = 31 * hash + get(i).hashCode(); - } - return hash; - } - } - private final List terms; public QueryResultImpl(List terms) { this.terms = terms; } - public static QueryResultImpl fromArray(Term[] terms) { - return new QueryResultImpl(new ShallowTermList(terms)); - } - @Override public List getTerms() { return this.terms; From 0b6de9a700128f0ccc96b1544e139418bd45f803 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 18:24:30 +0200 Subject: [PATCH 0689/1003] configure logging to avoid error message --- .../rulewerk/client/picocli/Main.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index e75ef7eb0..15a0d259f 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -22,6 +22,10 @@ import 
java.io.IOException; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; import org.semanticweb.rulewerk.client.shell.InteractiveShell; import picocli.CommandLine; @@ -39,6 +43,8 @@ public class Main { public static void main(final String[] args) throws IOException { + configureLogging(); + if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { InteractiveShell.run(); } else { @@ -57,5 +63,19 @@ public static void main(final String[] args) throws IOException { } } + + public static void configureLogging() { + // Create the appender that will write log messages to the console. + final ConsoleAppender consoleAppender = new ConsoleAppender(); + // Define the pattern of log messages. + // Insert the string "%c{1}:%L" to also show class name and line. + final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; + consoleAppender.setLayout(new PatternLayout(pattern)); + // Change to Level.ERROR for fewer messages: + consoleAppender.setThreshold(Level.FATAL); + + consoleAppender.activateOptions(); + Logger.getRootLogger().addAppender(consoleAppender); + } } From 0d7828cc3fe15e16d59098f303b00b72bedfbdc1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 18:52:41 +0200 Subject: [PATCH 0690/1003] add StringCompleter for command names --- .../rulewerk/client/shell/CommandReader.java | 199 ++- .../client/shell/DefaultConfiguration.java | 135 ++- .../client/shell/InteractiveShell.java | 2 +- .../rulewerk/commands/Interpreter.java | 407 ++++--- .../core/model/implementation/Serializer.java | 1073 +++++++++-------- 5 files changed, 939 insertions(+), 877 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index 9877ea6fa..f3aeb72e6 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -1,100 +1,99 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.jline.reader.LineReader; -import org.jline.reader.UserInterruptException; -import org.jline.utils.AttributedString; -import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.parser.ParsingException; - -public class CommandReader { - - private final LineReader lineReader; - private final PromptProvider promptProvider; - private final Interpreter interpreter; - - public CommandReader(final LineReader lineReader, final PromptProvider promptProvider, - final Interpreter interpreter) { - super(); // FIXME: there is no superclass? - this.lineReader = lineReader; - this.promptProvider = promptProvider; - this.interpreter = interpreter; - } - - /** - * Reads a command from the prompt and returns a corresponding {@link Command} - * object. If no command should be executed, null is returned. Some effort is - * made to interpret mistyped commands by adding @ and . before and after the - * input, if forgotten. - * - * @return command or null - */ - public Command readCommand() { - String readLine; - try { - final AttributedString prompt = this.promptProvider.getPrompt(); - readLine = this.lineReader.readLine(prompt.toAnsi(this.lineReader.getTerminal())); - } catch (final UserInterruptException e) { - if (e.getPartialLine().isEmpty()) { - // Exit request from user CTRL+C - return ExitCommandInterpreter.EXIT_COMMAND; - } else { - return null; // used as empty command - } - } - - readLine = readLine.trim(); - if ("".equals(readLine)) { - return null; - } - if (readLine.charAt(0) != '@') { - readLine = "@" + readLine; - } - if (readLine.charAt(readLine.length() - 1) != '.') { - readLine = readLine + "."; - } - - try { - return interpreter.parseCommand(readLine); - } catch (final ParsingException e) { - // FIXME do I need to flush terminal? - this.lineReader.getTerminal().writer() - .println("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); - return null; - } - } - -// /** -// * Sanitize the buffer input given the customizations applied to the JLine -// * parser (e.g. support for line continuations, etc.) -// */ -// static List sanitizeInput(List words) { -// words = words.stream().map(s -> s.replaceAll("^\\n+|\\n+$", "")) // CR at beginning/end of line introduced by -// // backslash continuation -// .map(s -> s.replaceAll("\\n+", " ")) // CR in middle of word introduced by return inside a quoted string -// .collect(Collectors.toList()); -// return words; -// } - -} +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; +import org.jline.utils.AttributedString; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class CommandReader { + + private final LineReader lineReader; + private final PromptProvider promptProvider; + private final Interpreter interpreter; + + public CommandReader(final LineReader lineReader, final PromptProvider promptProvider, + final Interpreter interpreter) { + this.lineReader = lineReader; + this.promptProvider = promptProvider; + this.interpreter = interpreter; + } + + /** + * Reads a command from the prompt and returns a corresponding {@link Command} + * object. If no command should be executed, null is returned. Some effort is + * made to interpret mistyped commands by adding @ and . before and after the + * input, if forgotten. + * + * @return command or null + */ + public Command readCommand() { + String readLine; + try { + final AttributedString prompt = this.promptProvider.getPrompt(); + readLine = this.lineReader.readLine(prompt.toAnsi(this.lineReader.getTerminal())); + } catch (final UserInterruptException e) { + if (e.getPartialLine().isEmpty()) { + // Exit request from user CTRL+C + return ExitCommandInterpreter.EXIT_COMMAND; + } else { + return null; // used as empty command + } + } + + readLine = readLine.trim(); + if ("".equals(readLine)) { + return null; + } + if (readLine.charAt(0) != '@') { + readLine = "@" + readLine; + } + if (readLine.charAt(readLine.length() - 1) != '.') { + readLine = readLine + "."; + } + + try { + return this.interpreter.parseCommand(readLine); + } catch (final ParsingException e) { + // FIXME do I need to flush terminal? + this.lineReader.getTerminal().writer() + .println("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); + return null; + } + } + +// /** +// * Sanitize the buffer input given the customizations applied to the JLine +// * parser (e.g. support for line continuations, etc.) +// */ +// static List sanitizeInput(List words) { +// words = words.stream().map(s -> s.replaceAll("^\\n+|\\n+$", "")) // CR at beginning/end of line introduced by +// // backslash continuation +// .map(s -> s.replaceAll("\\n+", " ")) // CR in middle of word introduced by return inside a quoted string +// .collect(Collectors.toList()); +// return words; +// } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 2b4085ac5..4cdc16ef7 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -1,60 +1,75 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; - -import org.jline.reader.LineReader; -import org.jline.reader.LineReaderBuilder; -import org.jline.terminal.Terminal; -import org.jline.terminal.TerminalBuilder; -import org.jline.utils.AttributedString; -import org.jline.utils.AttributedStyle; - -public final class DefaultConfiguration { - - private DefaultConfiguration() { - } - - public static PromptProvider buildPromptProvider() { - return () -> new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); - } - - public static LineReader buildLineReader(final Terminal terminal) { - final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) - .appName("Rulewerk Shell") - // .expander(expander()) - // .completer(buildCompleter()) - // .history(buildHistory()) - // .highlighter(buildHighlighter()) - ; - - final LineReader lineReader = lineReaderBuilder.build(); - lineReader.unsetOpt(LineReader.Option.INSERT_TAB); // This allows completion on an empty buffer, rather than - // inserting a tab - return lineReader; - } - - public static Terminal buildTerminal() throws IOException { - return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); - } - -} +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.jline.reader.Completer; +import org.jline.reader.LineReader; +import org.jline.reader.LineReaderBuilder; +import org.jline.reader.impl.completer.StringsCompleter; +import org.jline.terminal.Terminal; +import org.jline.terminal.TerminalBuilder; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + +public final class DefaultConfiguration { + + private DefaultConfiguration() { + } + + public static PromptProvider buildPromptProvider() { + return () -> new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); + } + + public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { + final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) + .appName("Rulewerk Shell") + .completer(buildCompleter(interpreter)) + // .expander(expander()) + // .history(buildHistory()) + // .highlighter(buildHighlighter()) + ; + + final LineReader lineReader = lineReaderBuilder.build(); + lineReader.unsetOpt(LineReader.Option.INSERT_TAB); // This allows completion on an empty buffer, rather than + // inserting a tab + return lineReader; + } + + private static Completer buildCompleter(final Interpreter interpreter) { + final Set registeredCommandNames = interpreter.getRegisteredCommands(); + final List serializedCommandNames = registeredCommandNames.stream() + .map(commandName -> Serializer.getCommandName(commandName)) + .collect(Collectors.toList()); + return new StringsCompleter(serializedCommandNames); + } + + public static Terminal buildTerminal() throws IOException { + return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index a66fd7383..0bee3a90c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -50,7 +50,7 @@ public static void run() throws IOException { final Shell shell = new Shell(interpreter); - final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 56843bad5..724b61271 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -1,201 +1,206 @@ -package org.semanticweb.rulewerk.commands; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.io.PrintWriter; - -/*- - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache 
License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.nio.charset.StandardCharsets; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map.Entry; - -import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Terms; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.ParserConfiguration; -import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; -import org.semanticweb.rulewerk.parser.javacc.ParseException; -import org.semanticweb.rulewerk.parser.javacc.TokenMgrError; - -public class Interpreter { - - final Reasoner reasoner; - final StyledPrinter printer; - final ParserConfiguration parserConfiguration; - - final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); - - public Interpreter(Reasoner reasoner, StyledPrinter printer, ParserConfiguration parserConfiguration) { - this.reasoner = reasoner; - this.printer = printer; - this.parserConfiguration = parserConfiguration; - registerDefaultCommandInterpreters(); - } - - public void registerCommandInterpreter(String command, CommandInterpreter commandInterpreter) { - commandInterpreters.put(command, commandInterpreter); - } - - public void runCommands(List commands) throws CommandExecutionException { - for (Command command : commands) { - runCommand(command); - } - } - - public void runCommand(Command command) throws CommandExecutionException { - if (commandInterpreters.containsKey(command.getName())) { - try { - commandInterpreters.get(command.getName()).run(command, this); - } catch (Exception e) { - throw new CommandExecutionException(e.getMessage(), e); - } - } else { - throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); - } - } - - public Command parseCommand(String commandString) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes(StandardCharsets.UTF_8)); - final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); - localParser.setParserConfiguration(parserConfiguration); - - // Copy prefixes from KB: - try { - localParser.getPrefixDeclarationRegistry().setBaseIri(reasoner.getKnowledgeBase().getBaseIri()); - for (Entry prefix : reasoner.getKnowledgeBase().getPrefixDeclarationRegistry()) { - localParser.getPrefixDeclarationRegistry().setPrefixIri(prefix.getKey(), prefix.getValue()); - } - } catch (PrefixDeclarationException e) { // unlikely! 
- throw new RuntimeException(e); - } - - Command result; - try { - result = localParser.command(); - localParser.ensureEndOfInput(); - } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { - throw new ParsingException("failed to parse command \"\"\"" + commandString + "\"\"\"", e); - } - return result; - } - - public Reasoner getReasoner() { - return reasoner; - } - - public KnowledgeBase getKnowledgeBase() { - return reasoner.getKnowledgeBase(); - } - - public ParserConfiguration getParserConfiguration() { - return parserConfiguration; - } - - public PrintWriter getWriter() { - return printer.getWriter(); - } - - public void printNormal(String string) { - printer.printNormal(string); - } - - public void printSection(String string) { - printer.printSection(string); - } - - public void printEmph(String string) { - printer.printEmph(string); - } - - public void printCode(String string) { - printer.printCode(string); - } - - public void printImportant(String string) { - printer.printImportant(string); - } - - private void registerDefaultCommandInterpreters() { - registerCommandInterpreter("help", new HelpCommandInterpreter()); - registerCommandInterpreter("load", new LoadCommandInterpreter()); - registerCommandInterpreter("assert", new AssertCommandInterpreter()); - registerCommandInterpreter("retract", new RetractCommandInterpreter()); - registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); - registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); - registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); - registerCommandInterpreter("reason", new ReasonCommandInterpreter()); - registerCommandInterpreter("query", new QueryCommandInterpreter()); - registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); - } - - /** - * Validate that the correct number of arguments was passed to a command. 
- * - * @param command Command to validate - * @param number expected number of parameters - * @throws CommandExecutionException if the number is not correct - */ - public static void validateArgumentCount(Command command, int number) throws CommandExecutionException { - if (command.getArguments().size() != number) { - throw new CommandExecutionException("This command requires exactly " + number + " argument(s), but " - + command.getArguments().size() + " were given."); - } - } - - private static CommandExecutionException getArgumentTypeError(int index, String expectedType, - String parameterName) { - return new CommandExecutionException( - "Argument at position " + index + " needs to be of type " + expectedType + " (" + parameterName + ")."); - } - - public static String extractStringArgument(Command command, int index, String parameterName) - throws CommandExecutionException { - try { - return Terms.extractString(command.getArguments().get(index).fromTerm() - .orElseThrow(() -> getArgumentTypeError(index, "string", parameterName))); - } catch (IllegalArgumentException e) { - throw getArgumentTypeError(index, "string", parameterName); - } - } - - public static String extractNameArgument(Command command, int index, String parameterName) - throws CommandExecutionException { - try { - return Terms.extractName(command.getArguments().get(index).fromTerm() - .orElseThrow(() -> getArgumentTypeError(index, "constant", parameterName))); - } catch (IllegalArgumentException e) { - throw getArgumentTypeError(index, "constant", parameterName); - } - } - - public static PositiveLiteral extractPositiveLiteralArgument(Command command, int index, String parameterName) - throws CommandExecutionException { - return command.getArguments().get(index).fromPositiveLiteral() - .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); - } - -} +package org.semanticweb.rulewerk.commands; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.io.PrintWriter; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.nio.charset.StandardCharsets; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map.Entry; +import java.util.Set; + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; +import org.semanticweb.rulewerk.parser.javacc.ParseException; +import org.semanticweb.rulewerk.parser.javacc.TokenMgrError; + +public class Interpreter { + + final Reasoner reasoner; + final StyledPrinter printer; + final ParserConfiguration parserConfiguration; + + final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); + + public Interpreter(final Reasoner reasoner, final StyledPrinter printer, final ParserConfiguration parserConfiguration) { + this.reasoner = reasoner; + this.printer = printer; + this.parserConfiguration = parserConfiguration; + this.registerDefaultCommandInterpreters(); + } + + public void registerCommandInterpreter(final String command, final CommandInterpreter commandInterpreter) { + this.commandInterpreters.put(command, commandInterpreter); + } + + public Set getRegisteredCommands() { + return this.commandInterpreters.keySet(); + } + + public void runCommands(final List commands) throws CommandExecutionException { + for (final Command command : commands) { + this.runCommand(command); + } + } + + public void runCommand(final Command command) throws CommandExecutionException { + if (this.commandInterpreters.containsKey(command.getName())) { + try { + this.commandInterpreters.get(command.getName()).run(command, this); + } catch (final Exception e) { + throw new CommandExecutionException(e.getMessage(), e); + } + } else { + throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); + } + } + + public Command parseCommand(final String commandString) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes(StandardCharsets.UTF_8)); + final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); + localParser.setParserConfiguration(this.parserConfiguration); + + // Copy prefixes from KB: + try { + localParser.getPrefixDeclarationRegistry().setBaseIri(this.reasoner.getKnowledgeBase().getBaseIri()); + for (final Entry prefix : this.reasoner.getKnowledgeBase().getPrefixDeclarationRegistry()) { + localParser.getPrefixDeclarationRegistry().setPrefixIri(prefix.getKey(), prefix.getValue()); + } + } catch (final PrefixDeclarationException e) { // unlikely! 
+ throw new RuntimeException(e); + } + + Command result; + try { + result = localParser.command(); + localParser.ensureEndOfInput(); + } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { + throw new ParsingException("failed to parse command \"\"\"" + commandString + "\"\"\"", e); + } + return result; + } + + public Reasoner getReasoner() { + return this.reasoner; + } + + public KnowledgeBase getKnowledgeBase() { + return this.reasoner.getKnowledgeBase(); + } + + public ParserConfiguration getParserConfiguration() { + return this.parserConfiguration; + } + + public PrintWriter getWriter() { + return this.printer.getWriter(); + } + + public void printNormal(final String string) { + this.printer.printNormal(string); + } + + public void printSection(final String string) { + this.printer.printSection(string); + } + + public void printEmph(final String string) { + this.printer.printEmph(string); + } + + public void printCode(final String string) { + this.printer.printCode(string); + } + + public void printImportant(final String string) { + this.printer.printImportant(string); + } + + private void registerDefaultCommandInterpreters() { + this.registerCommandInterpreter("help", new HelpCommandInterpreter()); + this.registerCommandInterpreter("load", new LoadCommandInterpreter()); + this.registerCommandInterpreter("assert", new AssertCommandInterpreter()); + this.registerCommandInterpreter("retract", new RetractCommandInterpreter()); + this.registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); + this.registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); + this.registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); + this.registerCommandInterpreter("reason", new ReasonCommandInterpreter()); + this.registerCommandInterpreter("query", new QueryCommandInterpreter()); + this.registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); + } + + /** + * Validate that the correct number of arguments was passed to a command. 
+ * + * @param command Command to validate + * @param number expected number of parameters + * @throws CommandExecutionException if the number is not correct + */ + public static void validateArgumentCount(final Command command, final int number) throws CommandExecutionException { + if (command.getArguments().size() != number) { + throw new CommandExecutionException("This command requires exactly " + number + " argument(s), but " + + command.getArguments().size() + " were given."); + } + } + + private static CommandExecutionException getArgumentTypeError(final int index, final String expectedType, + final String parameterName) { + return new CommandExecutionException( + "Argument at position " + index + " needs to be of type " + expectedType + " (" + parameterName + ")."); + } + + public static String extractStringArgument(final Command command, final int index, final String parameterName) + throws CommandExecutionException { + try { + return Terms.extractString(command.getArguments().get(index).fromTerm() + .orElseThrow(() -> getArgumentTypeError(index, "string", parameterName))); + } catch (final IllegalArgumentException e) { + throw getArgumentTypeError(index, "string", parameterName); + } + } + + public static String extractNameArgument(final Command command, final int index, final String parameterName) + throws CommandExecutionException { + try { + return Terms.extractName(command.getArguments().get(index).fromTerm() + .orElseThrow(() -> getArgumentTypeError(index, "constant", parameterName))); + } catch (final IllegalArgumentException e) { + throw getArgumentTypeError(index, "constant", parameterName); + } + } + + public static PositiveLiteral extractPositiveLiteralArgument(final Command command, final int index, final String parameterName) + throws CommandExecutionException { + return command.getArguments().get(index).fromPositiveLiteral() + .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 281c16cc6..7ae74f80b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -1,515 +1,558 @@ -package org.semanticweb.rulewerk.core.model.implementation; - -/*- - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.util.List; -import java.util.Map.Entry; -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; -import org.semanticweb.rulewerk.core.model.api.Conjunction; -import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; -import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; -import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; -import org.semanticweb.rulewerk.core.model.api.Literal; -import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.UniversalVariable; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; - -/** - * A utility class with static methods to obtain the correct parsable string - * representation of the different data models. - * - * @author Ali Elhalawati - * - */ -public final class Serializer { - private static final String NEW_LINE = "\n"; - public static final String STATEMENT_SEPARATOR = " ."; - public static final String COMMA = ", "; - public static final String NEGATIVE_IDENTIFIER = "~"; - public static final String EXISTENTIAL_IDENTIFIER = "!"; - public static final String UNIVERSAL_IDENTIFIER = "?"; - public static final String NAMEDNULL_IDENTIFIER = "_:"; - public static final String OPENING_PARENTHESIS = "("; - public static final String CLOSING_PARENTHESIS = ")"; - public static final String OPENING_BRACKET = "["; - public static final String CLOSING_BRACKET = "]"; - public static final String RULE_SEPARATOR = " :- "; - public static final char AT = '@'; - public static final String DATA_SOURCE = "@source "; - public static final String BASE = "@base "; - public static final String PREFIX = "@prefix "; - public static final String CSV_FILE_DATA_SOURCE = "load-csv"; - public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; - public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; - public static final String DATA_SOURCE_SEPARATOR = ": "; - public static final String COLON = ":"; - public static final String DOUBLE_CARET = "^^"; - public static final char LESS_THAN = '<'; - public static final char MORE_THAN = '>'; - public static final char QUOTE = '"'; - - public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; - public static final String REGEX_INTEGER = "^[-+]?\\d+$"; - public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; - public static final String REGEX_TRUE = "true"; - public static final String REGEX_FALSE = "false"; - - /** - * Constructor. - */ - private Serializer() { - - } - - /** - * Creates a String representation of a given {@link Rule}. - * - * @see Rule syntax - * @param rule a {@link Rule}. - * @return String representation corresponding to a given {@link Rule}. 
- * - */ - public static String getString(final Rule rule) { - return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link Conjunction}. - * - * @see Rule syntax - * @param conjunction a {@link Conjunction} - * @return String representation corresponding to a given {@link Conjunction}. - */ - public static String getString(final Conjunction conjunction) { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final Literal literal : conjunction.getLiterals()) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - stringBuilder.append(getString(literal)); - } - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Literal}. - * - * @see Rule syntax - * @param literal a {@link Literal} - * @return String representation corresponding to a given {@link Literal}. - */ - public static String getString(final Literal literal) { - final StringBuilder stringBuilder = new StringBuilder(""); - if (literal.isNegated()) { - stringBuilder.append(NEGATIVE_IDENTIFIER); - } - stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Fact}. - * - * @see Rule syntax - * @param fact a {@link Fact} - * @return String representation corresponding to a given {@link Fact}. - */ - public static String getFactString(final Fact fact) { - return getString(fact) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule syntax - * @param constant a {@link AbstractConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link AbstractConstant}. - */ - public static String getString(final AbstractConstant constant, Function iriTransformer) { - return getIRIString(constant.getName(), iriTransformer); - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule syntax - * @param constant a {@link AbstractConstant} - * @return String representation corresponding to a given - * {@link AbstractConstant}. - */ - public static String getString(final AbstractConstant constant) { - return getIRIString(constant.getName()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - * - * @see Rule syntax - * @param languageStringConstant a {@link LanguageStringConstant} - * @return String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - */ - public static String getConstantName(final LanguageStringConstant languageStringConstant) { - return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
- * <ul>
- * <li>{@code "string"^^xsd:String} results in {@code "string"},</li>
- * <li>{@code "23.0"^^xsd:Decimal} results in {@code 23.0},</li>
- * <li>{@code "42"^^xsd:Integer} results in {@code 42},</li>
- * <li>{@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and</li>
- * <li>{@code "test"^^} results in
- * {@code "test"^^}, modulo transformation of the datatype
- * IRI.</li>
- * </ul>
    - * - * @see Rule syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant, Function iriTransformer) { - if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { - return getString(datatypeConstant.getLexicalValue()); - } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { - return datatypeConstant.getLexicalValue(); - } - - return getConstantName(datatypeConstant, iriTransformer); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
- * <ul>
- * <li>{@code "string"^^xsd:String} results in {@code "string"},</li>
- * <li>{@code "23.0"^^xsd:Decimal} results in {@code 23.0},</li>
- * <li>{@code "42"^^xsd:Integer} results in {@code 42},</li>
- * <li>{@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and</li>
- * <li>{@code "test"^^} results in
- * {@code "test"^^}.</li>
- * </ul>
    - * - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant, Function.identity()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - private static String getConstantName(final DatatypeConstant datatypeConstant, - Function iriTransformer) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + getIRIString(datatypeConstant.getDatatype(), iriTransformer); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getConstantName(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + addAngleBrackets(datatypeConstant.getDatatype()); - } - - /** - * Creates a String representation of a given {@link ExistentialVariable}. - * - * @see Rule syntax - * @param existentialVariable a {@link ExistentialVariable} - * @return String representation corresponding to a given - * {@link ExistentialVariable}. - */ - public static String getString(final ExistentialVariable existentialVariable) { - return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); - } - - /** - * Creates a String representation of a given {@link UniversalVariable}. - * - * @see Rule syntax - * @param universalVariable a {@link UniversalVariable} - * @return String representation corresponding to a given - * {@link UniversalVariable}. - */ - public static String getString(final UniversalVariable universalVariable) { - return UNIVERSAL_IDENTIFIER + universalVariable.getName(); - } - - /** - * Creates a String representation of a given {@link NamedNull}. - * - * @see Rule syntax - * @param namedNull a {@link NamedNull} - * @return String representation corresponding to a given {@link NamedNull}. - */ - public static String getString(final NamedNull namedNull) { - return NAMEDNULL_IDENTIFIER + namedNull.getName(); - } - - /** - * Creates a String representation of a given {@link Predicate}. - * - * @see Rule syntax - * @param predicate a {@link Predicate} - * @return String representation corresponding to a given {@link Predicate}. - */ - public static String getString(final Predicate predicate) { - return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; - } - - /** - * Creates a String representation of a given {@link DataSourceDeclaration}. - * - * @see Rule syntax - * @param dataSourceDeclaration a {@link DataSourceDeclaration} - * @return String representation corresponding to a given - * {@link DataSourceDeclaration}. - */ - public static String getString(final DataSourceDeclaration dataSourceDeclaration) { - return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR - + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link CsvFileDataSource}. 
- * - * @see Rule syntax - * - * @param csvFileDataSource - * @return String representation corresponding to a given - * {@link CsvFileDataSource}. - */ - public static String getString(final CsvFileDataSource csvFileDataSource) { - return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given {@link RdfFileDataSource}. - * - * @see Rule syntax - * - * - * @param rdfFileDataSource - * @return String representation corresponding to a given - * {@link RdfFileDataSource}. - */ - public static String getString(final RdfFileDataSource rdfFileDataSource) { - return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given - * {@link SparqlQueryResultDataSource}. - * - * @see Rule syntax - * - * - * @param dataSource - * @return String representation corresponding to a given - * {@link SparqlQueryResultDataSource}. - */ - public static String getString(final SparqlQueryResultDataSource dataSource) { - return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS - + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA - + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) - + CLOSING_PARENTHESIS; - } - - private static String getFileString(final FileDataSource fileDataSource) { - return getString(fileDataSource.getPath()); - } - - private static String getIRIString(final String string) { - return getIRIString(string, Function.identity()); - } - - private static String getIRIString(final String string, Function iriTransformer) { - String transformed = iriTransformer.apply(string); - - if (!transformed.equals(string)) { - return transformed; - } - - if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) - || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { - return addAngleBrackets(string); - } - - return string; - } - - /** - * Constructs the parseable, serialized representation of given {@code string}. - * Escapes (with {@code \}) special character occurrences in given - * {@code string}, and surrounds the result with double quotation marks - * ({@code "}). The special characters are: - *
- * <ul>
- * <li>{@code \}</li>
- * <li>{@code "}</li>
- * <li>{@code \t}</li>
- * <li>{@code \b}</li>
- * <li>{@code \n}</li>
- * <li>{@code \r}</li>
- * <li>{@code \f}</li>
- * </ul>
    - * Example for {@code string = "\\a"}, the returned value is - * {@code string = "\"\\\\a\""} - * - * @param string - * @return an escaped string surrounded by {@code "}. - */ - public static String getString(final String string) { - return addQuotes(escape(string)); - } - - /** - * Escapes (with {@code \}) special character occurrences in given - * {@code string}. The special characters are: - *
- * <ul>
- * <li>{@code \}</li>
- * <li>{@code "}</li>
- * <li>{@code \t}</li>
- * <li>{@code \b}</li>
- * <li>{@code \n}</li>
- * <li>{@code \r}</li>
- * <li>{@code \f}</li>
- * </ul>
    - * - * @param string - * @return an escaped string - */ - private static String escape(final String string) { - return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") - .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); - // don't touch single quotes here since we only construct double-quoted strings - } - - private static String addQuotes(final String string) { - return QUOTE + string + QUOTE; - } - - private static String addAngleBrackets(final String string) { - return LESS_THAN + string + MORE_THAN; - } - - public static String getFactString(Predicate predicate, List terms) { - return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getFactString(Predicate predicate, List terms, Function iriTransformer) { - return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getString(Predicate predicate, List terms) { - return getString(predicate, terms, Function.identity()); - } - - public static String getString(Predicate predicate, List terms, Function iriTransformer) { - final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); - stringBuilder.append(OPENING_PARENTHESIS); - - boolean first = true; - for (final Term term : terms) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - final String string = term.getSyntacticRepresentation(iriTransformer); - stringBuilder.append(string); - } - stringBuilder.append(CLOSING_PARENTHESIS); - return stringBuilder.toString(); - } - - public static String getBaseString(KnowledgeBase knowledgeBase) { - String baseIri = knowledgeBase.getBaseIri(); - - return baseIri.equals(PrefixDeclarationRegistry.EMPTY_BASE) ? baseIri : getBaseDeclarationString(baseIri); - } - - private static String getBaseDeclarationString(String baseIri) { - return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getPrefixString(Entry prefix) { - return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getBaseAndPrefixDeclarations(KnowledgeBase knowledgeBase) { - StringBuilder sb = new StringBuilder(); - - sb.append(getBaseString(knowledgeBase)); - knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); - - return sb.toString(); - } -} +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.List; +import java.util.Map.Entry; +import java.util.function.Function; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +/** + * A utility class with static methods to obtain the correct parsable string + * representation of the different data models. + * + * @author Ali Elhalawati + * + */ +public final class Serializer { + private static final String NEW_LINE = "\n"; + public static final String STATEMENT_SEPARATOR = " ."; + public static final String COMMA = ", "; + public static final String NEGATIVE_IDENTIFIER = "~"; + public static final String EXISTENTIAL_IDENTIFIER = "!"; + public static final String UNIVERSAL_IDENTIFIER = "?"; + public static final String NAMEDNULL_IDENTIFIER = "_:"; + public static final String OPENING_PARENTHESIS = "("; + public static final String CLOSING_PARENTHESIS = ")"; + public static final String OPENING_BRACKET = "["; + public static final String CLOSING_BRACKET = "]"; + public static final String RULE_SEPARATOR = " :- "; + public static final char AT = '@'; + public static final String DATA_SOURCE = "@source "; + public static final String BASE = "@base "; + public static final String PREFIX = "@prefix "; + public static final String CSV_FILE_DATA_SOURCE = "load-csv"; + public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; + public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; + public static final String DATA_SOURCE_SEPARATOR = ": "; + public static final String COLON = ":"; + public static final String DOUBLE_CARET = "^^"; + public static final char LESS_THAN = '<'; + public static final char MORE_THAN = '>'; + public static final char QUOTE = '"'; + + public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; + public static final String REGEX_INTEGER = "^[-+]?\\d+$"; + public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; + public static final String REGEX_TRUE = "true"; + public static final String REGEX_FALSE = "false"; + + /** + * Constructor. + */ + private Serializer() { + + } + + /** + * Creates a String representation of a given {@link Rule}. + * + * @see Rule + * syntax + * @param rule a {@link Rule}. + * @return String representation corresponding to a given {@link Rule}. 
+ * + */ + public static String getString(final Rule rule) { + return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link Conjunction}. + * + * @see Rule + * syntax + * @param conjunction a {@link Conjunction} + * @return String representation corresponding to a given {@link Conjunction}. + */ + public static String getString(final Conjunction conjunction) { + final StringBuilder stringBuilder = new StringBuilder(); + boolean first = true; + for (final Literal literal : conjunction.getLiterals()) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + stringBuilder.append(getString(literal)); + } + return stringBuilder.toString(); + } + + /** + * Creates a String representation of a given {@link Literal}. + * + * @see Rule + * syntax + * @param literal a {@link Literal} + * @return String representation corresponding to a given {@link Literal}. + */ + public static String getString(final Literal literal) { + final StringBuilder stringBuilder = new StringBuilder(""); + if (literal.isNegated()) { + stringBuilder.append(NEGATIVE_IDENTIFIER); + } + stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); + return stringBuilder.toString(); + } + + /** + * Creates a String representation of a given {@link Fact}. + * + * @see Rule + * syntax + * @param fact a {@link Fact} + * @return String representation corresponding to a given {@link Fact}. + */ + public static String getFactString(final Fact fact) { + return getString(fact) + STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link AbstractConstant}. + * + * @see Rule + * syntax + * @param constant a {@link AbstractConstant} + * @param iriTransformer a function to transform IRIs with. + * @return String representation corresponding to a given + * {@link AbstractConstant}. + */ + public static String getString(final AbstractConstant constant, final Function iriTransformer) { + return getIRIString(constant.getName(), iriTransformer); + } + + /** + * Creates a String representation of a given {@link AbstractConstant}. + * + * @see Rule + * syntax + * @param constant a {@link AbstractConstant} + * @return String representation corresponding to a given + * {@link AbstractConstant}. + */ + public static String getString(final AbstractConstant constant) { + return getIRIString(constant.getName()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + * + * @see Rule + * syntax + * @param languageStringConstant a {@link LanguageStringConstant} + * @return String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + */ + public static String getConstantName(final LanguageStringConstant languageStringConstant) { + return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); + } + + /** + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. + * + * examples: + *
+ * <ul>
+ * <li>{@code "string"^^xsd:String} results in {@code "string"},</li>
+ * <li>{@code "23.0"^^xsd:Decimal} results in {@code 23.0},</li>
+ * <li>{@code "42"^^xsd:Integer} results in {@code 42},</li>
+ * <li>{@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and</li>
+ * <li>{@code "test"^^} results in
+ * {@code "test"^^}, modulo transformation of the datatype
+ * IRI.</li>
+ * </ul>
    + * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @param iriTransformer a function to transform IRIs with. + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(final DatatypeConstant datatypeConstant, + final Function iriTransformer) { + if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { + return getString(datatypeConstant.getLexicalValue()); + } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { + return datatypeConstant.getLexicalValue(); + } + + return getConstantName(datatypeConstant, iriTransformer); + } + + /** + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. + * + * examples: + *
+ * <ul>
+ * <li>{@code "string"^^xsd:String} results in {@code "string"},</li>
+ * <li>{@code "23.0"^^xsd:Decimal} results in {@code 23.0},</li>
+ * <li>{@code "42"^^xsd:Integer} results in {@code 42},</li>
+ * <li>{@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and</li>
+ * <li>{@code "test"^^} results in
+ * {@code "test"^^}.</li>
+ * </ul>
    + * + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(final DatatypeConstant datatypeConstant) { + return getString(datatypeConstant, Function.identity()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + private static String getConstantName(final DatatypeConstant datatypeConstant, + final Function iriTransformer) { + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + + getIRIString(datatypeConstant.getDatatype(), iriTransformer); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getConstantName(final DatatypeConstant datatypeConstant) { + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + + addAngleBrackets(datatypeConstant.getDatatype()); + } + + /** + * Creates a String representation of a given {@link ExistentialVariable}. + * + * @see Rule + * syntax + * @param existentialVariable a {@link ExistentialVariable} + * @return String representation corresponding to a given + * {@link ExistentialVariable}. + */ + public static String getString(final ExistentialVariable existentialVariable) { + return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); + } + + /** + * Creates a String representation of a given {@link UniversalVariable}. + * + * @see Rule + * syntax + * @param universalVariable a {@link UniversalVariable} + * @return String representation corresponding to a given + * {@link UniversalVariable}. + */ + public static String getString(final UniversalVariable universalVariable) { + return UNIVERSAL_IDENTIFIER + universalVariable.getName(); + } + + /** + * Creates a String representation of a given {@link NamedNull}. + * + * @see Rule + * syntax + * @param namedNull a {@link NamedNull} + * @return String representation corresponding to a given {@link NamedNull}. + */ + public static String getString(final NamedNull namedNull) { + return NAMEDNULL_IDENTIFIER + namedNull.getName(); + } + + /** + * Creates a String representation of a given {@link Predicate}. + * + * @see Rule + * syntax + * @param predicate a {@link Predicate} + * @return String representation corresponding to a given {@link Predicate}. + */ + public static String getString(final Predicate predicate) { + return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; + } + + /** + * Creates a String representation of a given {@link DataSourceDeclaration}. + * + * @see Rule + * syntax + * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @return String representation corresponding to a given + * {@link DataSourceDeclaration}. 
+ */ + public static String getString(final DataSourceDeclaration dataSourceDeclaration) { + return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR + + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link CsvFileDataSource}. + * + * @see Rule + * syntax + * + * @param csvFileDataSource + * @return String representation corresponding to a given + * {@link CsvFileDataSource}. + */ + public static String getString(final CsvFileDataSource csvFileDataSource) { + return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; + } + + /** + * Creates a String representation of a given {@link RdfFileDataSource}. + * + * @see Rule + * syntax + * + * + * @param rdfFileDataSource + * @return String representation corresponding to a given + * {@link RdfFileDataSource}. + */ + public static String getString(final RdfFileDataSource rdfFileDataSource) { + return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; + } + + /** + * Creates a String representation of a given + * {@link SparqlQueryResultDataSource}. + * + * @see Rule + * syntax + * + * + * @param dataSource + * @return String representation corresponding to a given + * {@link SparqlQueryResultDataSource}. + */ + public static String getString(final SparqlQueryResultDataSource dataSource) { + return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS + + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA + + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) + + CLOSING_PARENTHESIS; + } + + private static String getFileString(final FileDataSource fileDataSource) { + return getString(fileDataSource.getPath()); + } + + private static String getIRIString(final String string) { + return getIRIString(string, Function.identity()); + } + + private static String getIRIString(final String string, final Function iriTransformer) { + final String transformed = iriTransformer.apply(string); + + if (!transformed.equals(string)) { + return transformed; + } + + if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) + || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { + return addAngleBrackets(string); + } + + return string; + } + + /** + * Constructs the parseable, serialized representation of given {@code string}. + * Escapes (with {@code \}) special character occurrences in given + * {@code string}, and surrounds the result with double quotation marks + * ({@code "}). The special characters are: + *
+ * <ul>
+ * <li>{@code \}</li>
+ * <li>{@code "}</li>
+ * <li>{@code \t}</li>
+ * <li>{@code \b}</li>
+ * <li>{@code \n}</li>
+ * <li>{@code \r}</li>
+ * <li>{@code \f}</li>
+ * </ul>
    + * Example for {@code string = "\\a"}, the returned value is + * {@code string = "\"\\\\a\""} + * + * @param string + * @return an escaped string surrounded by {@code "}. + */ + public static String getString(final String string) { + return addQuotes(escape(string)); + } + + /** + * Escapes (with {@code \}) special character occurrences in given + * {@code string}. The special characters are: + *
+ * <ul>
+ * <li>{@code \}</li>
+ * <li>{@code "}</li>
+ * <li>{@code \t}</li>
+ * <li>{@code \b}</li>
+ * <li>{@code \n}</li>
+ * <li>{@code \r}</li>
+ * <li>{@code \f}</li>
+ * </ul>
    + * + * @param string + * @return an escaped string + */ + private static String escape(final String string) { + return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") + .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); + // don't touch single quotes here since we only construct double-quoted strings + } + + private static String addQuotes(final String string) { + return QUOTE + string + QUOTE; + } + + private static String addAngleBrackets(final String string) { + return LESS_THAN + string + MORE_THAN; + } + + public static String getFactString(final Predicate predicate, final List terms) { + return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getFactString(final Predicate predicate, final List terms, + final Function iriTransformer) { + return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getString(final Predicate predicate, final List terms) { + return getString(predicate, terms, Function.identity()); + } + + public static String getString(final Predicate predicate, final List terms, + final Function iriTransformer) { + final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); + stringBuilder.append(OPENING_PARENTHESIS); + + boolean first = true; + for (final Term term : terms) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + final String string = term.getSyntacticRepresentation(iriTransformer); + stringBuilder.append(string); + } + stringBuilder.append(CLOSING_PARENTHESIS); + return stringBuilder.toString(); + } + + public static String getBaseString(final KnowledgeBase knowledgeBase) { + final String baseIri = knowledgeBase.getBaseIri(); + + return baseIri.equals(PrefixDeclarationRegistry.EMPTY_BASE) ? baseIri : getBaseDeclarationString(baseIri); + } + + private static String getBaseDeclarationString(final String baseIri) { + return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getPrefixString(final Entry prefix) { + return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getBaseAndPrefixDeclarations(final KnowledgeBase knowledgeBase) { + final StringBuilder sb = new StringBuilder(); + + sb.append(getBaseString(knowledgeBase)); + knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); + + return sb.toString(); + } + + public static String getCommandName(final String commandName) { + return AT + commandName; + } +} From 6b792094b13b9c7fc6f2a5c4390626bcbae8a3d4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 21 Aug 2020 21:07:29 +0200 Subject: [PATCH 0691/1003] Disable warnings when building VLog in CI --- build-vlog-library.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/build-vlog-library.sh b/build-vlog-library.sh index c1d915137..ff059801f 100755 --- a/build-vlog-library.sh +++ b/build-vlog-library.sh @@ -15,7 +15,13 @@ else # git pull mkdir build cd build - cmake -DJAVA=1 -DSPARQL=1 .. + if [ "x${CI}" = "xtrue" ] + then + # disable warnings when running in CI to keep travis logs short + cmake -DJAVA=1 -DSPARQL=1 -DCMAKE_CXX_FLAGS=-w .. + else + cmake -DJAVA=1 -DSPARQL=1 .. + fi make cp jvlog.jar ../../../local_builds/jvlog.jar cd ../../.. 
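Aside (not part of any patch in this series): the string-escaping behaviour introduced in the new Serializer above is easy to lose in the diff. The self-contained Java sketch below mirrors the replacement chain of the private escape(String) helper and the quote-wrapping of getString(String) shown in this patch, and reproduces the documented example in which the two-character input \a serialises to "\\a". The class and method names in the sketch are invented for illustration only; the real code lives in org.semanticweb.rulewerk.core.model.implementation.Serializer.

public class EscapingSketch {

	// Same replacement chain as the private Serializer.escape(String) introduced above.
	static String escape(final String string) {
		return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b")
				.replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f");
	}

	// Corresponds to Serializer.getString(String): escape, then wrap in double quotes.
	static String serializeString(final String string) {
		return "\"" + escape(string) + "\"";
	}

	public static void main(final String[] args) {
		// Javadoc example: the two-character input \a becomes "\\a"
		// (opening quote, two backslashes, the letter a, closing quote).
		final String serialized = serializeString("\\a");
		System.out.println(serialized);                     // prints "\\a"
		System.out.println(serialized.equals("\"\\\\a\"")); // prints true
	}
}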
From b3aa1f63bd73e482ea9e63aa3062c87b2166996c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 14:34:14 +0200 Subject: [PATCH 0692/1003] More efficient IRI unresolving --- .../model/api/PrefixDeclarationRegistry.java | 3 +- .../AbstractPrefixDeclarationRegistry.java | 29 +++++++++++++ .../MergingPrefixDeclarationRegistry.java | 43 +------------------ .../MergingPrefixDeclarationRegistryTest.java | 13 +++--- 4 files changed, 40 insertions(+), 48 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index a3ac69b9c..3fdd3b939 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -91,7 +91,7 @@ public interface PrefixDeclarationRegistry extends Iterableprefixed * name into an absolute IRI. @@ -114,4 +114,5 @@ public interface PrefixDeclarationRegistry extends Iterable baseIri.length() + && iri.startsWith(baseIri)) { + shortestIri = iri.substring(baseIri.length()); + } + + for (Map.Entry entry : prefixes.entrySet()) { + int localNameLength = iri.length() - entry.getValue().length(); + if (localNameLength > 0 && shortestIri.length() > localNameLength + entry.getKey().length() + && iri.startsWith(entry.getValue())) { + shortestIri = entry.getKey() + iri.substring(entry.getValue().length()); + } + } + + return shortestIri; + } @Override public Iterator> iterator() { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index c9104a280..5dade8819 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -20,11 +20,6 @@ * #L% */ -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; import java.util.Map.Entry; import org.apache.commons.lang3.Validate; @@ -47,7 +42,7 @@ final public class MergingPrefixDeclarationRegistry extends AbstractPrefixDeclar /** * Template string to use for generated prefix name */ - private static final String GENERATED_PREFIX_TEMPLATE = "rulewerk_generated_%d" + private static final String GENERATED_PREFIX_TEMPLATE = "rw_gen%d" + PrefixDeclarationRegistry.PREFIX_NAME_SEPARATOR; public MergingPrefixDeclarationRegistry() { @@ -99,42 +94,6 @@ public void setPrefixIri(String prefixName, String prefixIri) { prefixes.put(name, prefixIri); } - /** - * Turn an absolute Iri into a (possibly) prefixed name. Dual to - * {@link AbstractPrefixDeclarationRegistry#resolvePrefixedName}. - * - * @param iri an absolute Iri to abbreviate. - * - * @return an abbreviated form of {@code iri} if an appropriate prefix is known, - * or {@code iri}. 
- */ - public String unresolveAbsoluteIri(String iri) { - Map matches = new HashMap<>(); - String baseIri = getBaseIri(); - - if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.startsWith(baseIri) && !iri.equals(baseIri)) { - matches.put(iri.replaceFirst(baseIri, PrefixDeclarationRegistry.EMPTY_BASE), baseIri.length()); - } - - prefixes.forEach((prefixName, prefixIri) -> { - // only select proper prefixes here, since `eg:` is not a valid prefixed name. - if (iri.startsWith(prefixIri) && !iri.equals(prefixIri)) { - matches.put(iri.replaceFirst(prefixIri, prefixName), prefixIri.length()); - } - }); - - List matchesByLength = new ArrayList<>(matches.keySet()); - // reverse order, so we get the longest match first - matchesByLength.sort(Comparator.comparing(matches::get).reversed()); - - if (matchesByLength.size() > 0) { - return matchesByLength.get(0); - } else { - // no matching prefix - return iri; - } - } - /** * Merge another set of prefix declarations. * diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 812385669..9bf049824 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -104,7 +104,7 @@ public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationExce @Test public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDeclarationException { - String prefix = "rulewerk_generated_"; + String prefix = "rw_gen"; prefixDeclarations.setPrefixIri(prefix + "0:", BASE + "generated/"); prefixDeclarations.setPrefixIri("eg:", BASE); prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); @@ -128,7 +128,7 @@ public void mergePrefixDeclarations_conflictingPrefixName_renamesConflictingPref prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); assertEquals(BASE, this.prefixDeclarations.getPrefixIri("eg:")); - assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("rulewerk_generated_0:")); + assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("rw_gen0:")); } @Test @@ -188,7 +188,8 @@ public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws Pref } @Test - public void unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative() throws PrefixDeclarationException { + public void unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative() + throws PrefixDeclarationException { String relativeIri = this.prefixDeclarations.absolutizeIri(RELATIVE); PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setBaseIri(BASE); @@ -198,13 +199,15 @@ public void unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative( @Test public void unresolveAbsoluteIri_absoluteIriMergedOntoEmptyBase_staysAbsolute() throws PrefixDeclarationException { - assertEquals("", this.prefixDeclarations.getBaseIri()); + assertEquals("", this.prefixDeclarations.getBaseIri()); // FIXME: why test this? 
+ PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setBaseIri(BASE); String absoluteIri = prefixDeclarations.absolutizeIri(RELATIVE); this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); String resolvedIri = this.prefixDeclarations.unresolveAbsoluteIri(absoluteIri); + assertNotEquals(RELATIVE, resolvedIri); - assertEquals("rulewerk_generated_0:" + RELATIVE, resolvedIri); + assertEquals("rw_gen0:" + RELATIVE, resolvedIri); } } From da8af909920e461aaf3d582c070ccfe0423ed225 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 14:38:56 +0200 Subject: [PATCH 0693/1003] generally support IRI unresolving --- .../core/model/api/PrefixDeclarationRegistry.java | 10 ++++++++++ .../AbstractPrefixDeclarationRegistry.java | 14 +++----------- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 3fdd3b939..9727b6e1b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -115,4 +115,14 @@ public interface PrefixDeclarationRegistry extends Iterable Date: Sat, 22 Aug 2020 17:25:50 +0200 Subject: [PATCH 0694/1003] avoid ambiguous abbreviated IRIs do not use local names like "123" or "true" --- .../implementation/AbstractPrefixDeclarationRegistry.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index b23166975..8186347b6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -104,6 +104,12 @@ public String unresolveAbsoluteIri(String iri) { if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.length() > baseIri.length() && iri.startsWith(baseIri)) { shortestIri = iri.substring(baseIri.length()); + // Only allow very simple names of this form, to avoid confusion, e.g., with + // numbers or boolean literals: + if (!shortestIri.matches("^[a-zA-Z]([/a-zA-Z0-9_-])*$") || "true".equals(shortestIri) + || "false".equals(shortestIri)) { + shortestIri = iri; + } } for (Map.Entry entry : prefixes.entrySet()) { From a8faf2e516cdc4def2672da8eae18e482836678d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 23:05:54 +0200 Subject: [PATCH 0695/1003] Prepare new Serializer class --- .../client/shell/DefaultConfiguration.java | 4 +- .../core/model/api/AbstractConstant.java | 6 +- .../rulewerk/core/model/api/Command.java | 8 +- .../rulewerk/core/model/api/Conjunction.java | 4 +- .../rulewerk/core/model/api/DataSource.java | 9 + .../core/model/api/DataSourceDeclaration.java | 4 +- .../core/model/api/DatatypeConstant.java | 6 +- .../core/model/api/ExistentialVariable.java | 4 +- .../rulewerk/core/model/api/Fact.java | 4 +- .../model/api/LanguageStringConstant.java | 4 +- .../rulewerk/core/model/api/Literal.java | 4 +- .../rulewerk/core/model/api/NamedNull.java | 4 +- 
.../rulewerk/core/model/api/Predicate.java | 4 +- .../model/api/PrefixDeclarationRegistry.java | 6 +- .../rulewerk/core/model/api/Rule.java | 4 +- .../core/model/api/UniversalVariable.java | 4 +- .../AbstractPrefixDeclarationRegistry.java | 12 +- .../implementation/DatatypeConstantImpl.java | 2 +- .../LanguageStringConstantImpl.java | 2 +- .../core/model/implementation/Serializer.java | 1020 ++++++++--------- .../rulewerk/core/reasoner/KnowledgeBase.java | 12 +- .../rulewerk/core/reasoner/Reasoner.java | 6 +- .../implementation/CsvFileDataSource.java | 10 +- .../implementation/FileDataSource.java | 19 + .../implementation/QueryResultImpl.java | 2 +- .../implementation/RdfFileDataSource.java | 9 +- .../SparqlQueryResultDataSource.java | 25 +- .../core/model/DataSourceDeclarationTest.java | 6 +- .../MergingPrefixDeclarationRegistryTest.java | 26 +- .../rulewerk/core/model/SerializerTest.java | 214 ++++ .../parser/DefaultParserConfiguration.java | 8 +- 31 files changed, 812 insertions(+), 640 deletions(-) create mode 100644 rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 4cdc16ef7..927c61701 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -34,7 +34,7 @@ import org.jline.utils.AttributedString; import org.jline.utils.AttributedStyle; import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; public final class DefaultConfiguration { @@ -63,7 +63,7 @@ public static LineReader buildLineReader(final Terminal terminal, final Interpre private static Completer buildCompleter(final Interpreter interpreter) { final Set registeredCommandNames = interpreter.getRegisteredCommands(); final List serializedCommandNames = registeredCommandNames.stream() - .map(commandName -> Serializer.getCommandName(commandName)) + .map(commandName -> OldSerializer.getCommandName(commandName)) .collect(Collectors.toList()); return new StringsCompleter(serializedCommandNames); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java index 750434b73..70592a3e6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for abstract constants, i.e. 
for constants that represent an @@ -40,11 +40,11 @@ default TermType getType() { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override default String getSyntacticRepresentation(Function iriTransformer) { - return Serializer.getString(this, iriTransformer); + return OldSerializer.getString(this, iriTransformer); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java index 3af723089..fce5aa32a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Class for representing a generic command that can be executed. @@ -72,15 +72,15 @@ public String getSyntacticRepresentation() { result.append(" "); if (argument.fromRule().isPresent()) { Rule rule = argument.fromRule().get(); - result.append(Serializer.getString(rule.getHead())).append(Serializer.RULE_SEPARATOR) - .append(Serializer.getString(rule.getBody())); + result.append(OldSerializer.getString(rule.getHead())).append(OldSerializer.RULE_SEPARATOR) + .append(OldSerializer.getString(rule.getBody())); } else if (argument.fromPositiveLiteral().isPresent()) { result.append(argument.fromPositiveLiteral().get().getSyntacticRepresentation()); } else { throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); } } - result.append(Serializer.STATEMENT_SEPARATOR); + result.append(OldSerializer.STATEMENT_SEPARATOR); return result.toString(); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java index be4e9f9ee..2b7511580 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for representing conjunctions of {@link Literal}s, i.e., lists of @@ -43,7 +43,7 @@ public interface Conjunction extends Iterable, SyntaxObjec @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java index 402d80127..f8afe3ee8 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java @@ -40,4 +40,13 @@ public default Optional getRequiredArity() { return Optional.empty(); } + /** + * Returns a fact that represents the declaration of this {@link DataSource}. + * Rulewerk syntax uses facts to specify the relevant parameters for data source + * declarations. 
+ * + * @return {@link Fact} that contains the parameters of this data source + */ + public Fact getDeclarationFact(); + } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java index da1837bba..c609e1a03 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -1,6 +1,6 @@ package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /*- * #%L @@ -47,6 +47,6 @@ public interface DataSourceDeclaration extends Statement, Entity { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java index 28fbc87d5..8f4f7c414 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for datatype constants, i.e. for constants that represent a @@ -64,11 +64,11 @@ default TermType getType() { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override default String getSyntacticRepresentation(Function iriTransformer) { - return Serializer.getString(this, iriTransformer); + return OldSerializer.getString(this, iriTransformer); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java index 4e7d60d78..35f08ab39 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for existentially quantified variables, i.e., variables that appear @@ -37,6 +37,6 @@ default TermType getType() { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java index 61a302e32..5b1fc9df2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java @@ -1,6 +1,6 @@ package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /*- * #%L @@ -33,7 
+33,7 @@ public interface Fact extends PositiveLiteral, Statement { @Override default String getSyntacticRepresentation() { - return Serializer.getFactString(this); + return OldSerializer.getFactString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java index fbd60d57b..2d7d082d5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for string constants with a language tag, used to represent values @@ -67,7 +67,7 @@ default String getDatatype() { @Override default String getSyntacticRepresentation() { - return Serializer.getConstantName(this); + return OldSerializer.getConstantName(this); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java index b278f7722..d40d3a04a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for literals. A positive literal is simply an atomic formula, i.e., @@ -54,7 +54,7 @@ public interface Literal extends SyntaxObject, Entity { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java index 4b1350265..b78a6be6d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for {@link TermType#NAMED_NULL} terms. A blank is an entity used to @@ -40,7 +40,7 @@ default TermType getType() { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java index e34ec9b24..5ff3a4daa 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * A Predicate represents a relation between terms. 
Is uniquely identified by @@ -51,6 +51,6 @@ public interface Predicate extends Entity { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 9727b6e1b..fd82ade16 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -119,10 +119,12 @@ public interface PrefixDeclarationRegistry extends Iterable" : iri; String baseIri = getBaseIri(); if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.length() > baseIri.length() && iri.startsWith(baseIri)) { - shortestIri = iri.substring(baseIri.length()); + String shorterIri = iri.substring(baseIri.length()); // Only allow very simple names of this form, to avoid confusion, e.g., with // numbers or boolean literals: - if (!shortestIri.matches("^[a-zA-Z]([/a-zA-Z0-9_-])*$") || "true".equals(shortestIri) - || "false".equals(shortestIri)) { - shortestIri = iri; + if (shorterIri.matches("^[a-zA-Z]([/a-zA-Z0-9_-])*$") && !"true".equals(shorterIri) + || !"false".equals(shorterIri)) { + shortestIri = shorterIri; } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java index f81e44681..f58e13002 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -65,7 +65,7 @@ public String toString() { @Override public String getName() { - return Serializer.getConstantName(this); + return OldSerializer.getConstantName(this); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java index 235bfcd31..8a7ae5ca1 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java @@ -44,7 +44,7 @@ public LanguageStringConstantImpl(String string, String languageTag) { @Override public String getName() { - return Serializer.getConstantName(this); + return OldSerializer.getConstantName(this); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 7ae74f80b..23d4c238c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -1,558 +1,462 @@ -package org.semanticweb.rulewerk.core.model.implementation; - -/*- - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file 
except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.List; -import java.util.Map.Entry; -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; -import org.semanticweb.rulewerk.core.model.api.Conjunction; -import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; -import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; -import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; -import org.semanticweb.rulewerk.core.model.api.Literal; -import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.UniversalVariable; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; - -/** - * A utility class with static methods to obtain the correct parsable string - * representation of the different data models. 
- * - * @author Ali Elhalawati - * - */ -public final class Serializer { - private static final String NEW_LINE = "\n"; - public static final String STATEMENT_SEPARATOR = " ."; - public static final String COMMA = ", "; - public static final String NEGATIVE_IDENTIFIER = "~"; - public static final String EXISTENTIAL_IDENTIFIER = "!"; - public static final String UNIVERSAL_IDENTIFIER = "?"; - public static final String NAMEDNULL_IDENTIFIER = "_:"; - public static final String OPENING_PARENTHESIS = "("; - public static final String CLOSING_PARENTHESIS = ")"; - public static final String OPENING_BRACKET = "["; - public static final String CLOSING_BRACKET = "]"; - public static final String RULE_SEPARATOR = " :- "; - public static final char AT = '@'; - public static final String DATA_SOURCE = "@source "; - public static final String BASE = "@base "; - public static final String PREFIX = "@prefix "; - public static final String CSV_FILE_DATA_SOURCE = "load-csv"; - public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; - public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; - public static final String DATA_SOURCE_SEPARATOR = ": "; - public static final String COLON = ":"; - public static final String DOUBLE_CARET = "^^"; - public static final char LESS_THAN = '<'; - public static final char MORE_THAN = '>'; - public static final char QUOTE = '"'; - - public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; - public static final String REGEX_INTEGER = "^[-+]?\\d+$"; - public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; - public static final String REGEX_TRUE = "true"; - public static final String REGEX_FALSE = "false"; - - /** - * Constructor. - */ - private Serializer() { - - } - - /** - * Creates a String representation of a given {@link Rule}. - * - * @see Rule - * syntax - * @param rule a {@link Rule}. - * @return String representation corresponding to a given {@link Rule}. - * - */ - public static String getString(final Rule rule) { - return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link Conjunction}. - * - * @see Rule - * syntax - * @param conjunction a {@link Conjunction} - * @return String representation corresponding to a given {@link Conjunction}. - */ - public static String getString(final Conjunction conjunction) { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final Literal literal : conjunction.getLiterals()) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - stringBuilder.append(getString(literal)); - } - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Literal}. - * - * @see Rule - * syntax - * @param literal a {@link Literal} - * @return String representation corresponding to a given {@link Literal}. - */ - public static String getString(final Literal literal) { - final StringBuilder stringBuilder = new StringBuilder(""); - if (literal.isNegated()) { - stringBuilder.append(NEGATIVE_IDENTIFIER); - } - stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Fact}. - * - * @see Rule - * syntax - * @param fact a {@link Fact} - * @return String representation corresponding to a given {@link Fact}. 
- */ - public static String getFactString(final Fact fact) { - return getString(fact) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule - * syntax - * @param constant a {@link AbstractConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link AbstractConstant}. - */ - public static String getString(final AbstractConstant constant, final Function iriTransformer) { - return getIRIString(constant.getName(), iriTransformer); - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule - * syntax - * @param constant a {@link AbstractConstant} - * @return String representation corresponding to a given - * {@link AbstractConstant}. - */ - public static String getString(final AbstractConstant constant) { - return getIRIString(constant.getName()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - * - * @see Rule - * syntax - * @param languageStringConstant a {@link LanguageStringConstant} - * @return String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - */ - public static String getConstantName(final LanguageStringConstant languageStringConstant) { - return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
- * <ul>
- * <li>{@code "string"^^xsd:String} results in {@code "string"},</li>
- * <li>{@code "23.0"^^xsd:Decimal} results in {@code 23.0},</li>
- * <li>{@code "42"^^xsd:Integer} results in {@code 42},</li>
- * <li>{@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and</li>
- * <li>{@code "test"^^} results in {@code "test"^^}, modulo transformation of the datatype IRI.</li>
- * </ul>
    - * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant, - final Function iriTransformer) { - if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { - return getString(datatypeConstant.getLexicalValue()); - } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { - return datatypeConstant.getLexicalValue(); - } - - return getConstantName(datatypeConstant, iriTransformer); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
- * <ul>
- * <li>{@code "string"^^xsd:String} results in {@code "string"},</li>
- * <li>{@code "23.0"^^xsd:Decimal} results in {@code 23.0},</li>
- * <li>{@code "42"^^xsd:Integer} results in {@code 42},</li>
- * <li>{@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and</li>
- * <li>{@code "test"^^} results in {@code "test"^^}.</li>
- * </ul>
    - * - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant, Function.identity()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - private static String getConstantName(final DatatypeConstant datatypeConstant, - final Function iriTransformer) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + getIRIString(datatypeConstant.getDatatype(), iriTransformer); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getConstantName(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + addAngleBrackets(datatypeConstant.getDatatype()); - } - - /** - * Creates a String representation of a given {@link ExistentialVariable}. - * - * @see Rule - * syntax - * @param existentialVariable a {@link ExistentialVariable} - * @return String representation corresponding to a given - * {@link ExistentialVariable}. - */ - public static String getString(final ExistentialVariable existentialVariable) { - return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); - } - - /** - * Creates a String representation of a given {@link UniversalVariable}. - * - * @see Rule - * syntax - * @param universalVariable a {@link UniversalVariable} - * @return String representation corresponding to a given - * {@link UniversalVariable}. - */ - public static String getString(final UniversalVariable universalVariable) { - return UNIVERSAL_IDENTIFIER + universalVariable.getName(); - } - - /** - * Creates a String representation of a given {@link NamedNull}. - * - * @see Rule - * syntax - * @param namedNull a {@link NamedNull} - * @return String representation corresponding to a given {@link NamedNull}. - */ - public static String getString(final NamedNull namedNull) { - return NAMEDNULL_IDENTIFIER + namedNull.getName(); - } - - /** - * Creates a String representation of a given {@link Predicate}. - * - * @see Rule - * syntax - * @param predicate a {@link Predicate} - * @return String representation corresponding to a given {@link Predicate}. - */ - public static String getString(final Predicate predicate) { - return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; - } - - /** - * Creates a String representation of a given {@link DataSourceDeclaration}. - * - * @see Rule - * syntax - * @param dataSourceDeclaration a {@link DataSourceDeclaration} - * @return String representation corresponding to a given - * {@link DataSourceDeclaration}. 
- */ - public static String getString(final DataSourceDeclaration dataSourceDeclaration) { - return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR - + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link CsvFileDataSource}. - * - * @see Rule - * syntax - * - * @param csvFileDataSource - * @return String representation corresponding to a given - * {@link CsvFileDataSource}. - */ - public static String getString(final CsvFileDataSource csvFileDataSource) { - return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given {@link RdfFileDataSource}. - * - * @see Rule - * syntax - * - * - * @param rdfFileDataSource - * @return String representation corresponding to a given - * {@link RdfFileDataSource}. - */ - public static String getString(final RdfFileDataSource rdfFileDataSource) { - return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given - * {@link SparqlQueryResultDataSource}. - * - * @see Rule - * syntax - * - * - * @param dataSource - * @return String representation corresponding to a given - * {@link SparqlQueryResultDataSource}. - */ - public static String getString(final SparqlQueryResultDataSource dataSource) { - return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS - + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA - + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) - + CLOSING_PARENTHESIS; - } - - private static String getFileString(final FileDataSource fileDataSource) { - return getString(fileDataSource.getPath()); - } - - private static String getIRIString(final String string) { - return getIRIString(string, Function.identity()); - } - - private static String getIRIString(final String string, final Function iriTransformer) { - final String transformed = iriTransformer.apply(string); - - if (!transformed.equals(string)) { - return transformed; - } - - if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) - || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { - return addAngleBrackets(string); - } - - return string; - } - - /** - * Constructs the parseable, serialized representation of given {@code string}. - * Escapes (with {@code \}) special character occurrences in given - * {@code string}, and surrounds the result with double quotation marks - * ({@code "}). The special characters are: - *
- * <ul>
- * <li>{@code \}</li>
- * <li>{@code "}</li>
- * <li>{@code \t}</li>
- * <li>{@code \b}</li>
- * <li>{@code \n}</li>
- * <li>{@code \r}</li>
- * <li>{@code \f}</li>
- * </ul>
    - * Example for {@code string = "\\a"}, the returned value is - * {@code string = "\"\\\\a\""} - * - * @param string - * @return an escaped string surrounded by {@code "}. - */ - public static String getString(final String string) { - return addQuotes(escape(string)); - } - - /** - * Escapes (with {@code \}) special character occurrences in given - * {@code string}. The special characters are: - *
- * <ul>
- * <li>{@code \}</li>
- * <li>{@code "}</li>
- * <li>{@code \t}</li>
- * <li>{@code \b}</li>
- * <li>{@code \n}</li>
- * <li>{@code \r}</li>
- * <li>{@code \f}</li>
- * </ul>
    - * - * @param string - * @return an escaped string - */ - private static String escape(final String string) { - return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") - .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); - // don't touch single quotes here since we only construct double-quoted strings - } - - private static String addQuotes(final String string) { - return QUOTE + string + QUOTE; - } - - private static String addAngleBrackets(final String string) { - return LESS_THAN + string + MORE_THAN; - } - - public static String getFactString(final Predicate predicate, final List terms) { - return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getFactString(final Predicate predicate, final List terms, - final Function iriTransformer) { - return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getString(final Predicate predicate, final List terms) { - return getString(predicate, terms, Function.identity()); - } - - public static String getString(final Predicate predicate, final List terms, - final Function iriTransformer) { - final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); - stringBuilder.append(OPENING_PARENTHESIS); - - boolean first = true; - for (final Term term : terms) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - final String string = term.getSyntacticRepresentation(iriTransformer); - stringBuilder.append(string); - } - stringBuilder.append(CLOSING_PARENTHESIS); - return stringBuilder.toString(); - } - - public static String getBaseString(final KnowledgeBase knowledgeBase) { - final String baseIri = knowledgeBase.getBaseIri(); - - return baseIri.equals(PrefixDeclarationRegistry.EMPTY_BASE) ? baseIri : getBaseDeclarationString(baseIri); - } - - private static String getBaseDeclarationString(final String baseIri) { - return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getPrefixString(final Entry prefix) { - return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getBaseAndPrefixDeclarations(final KnowledgeBase knowledgeBase) { - final StringBuilder sb = new StringBuilder(); - - sb.append(getBaseString(knowledgeBase)); - knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); - - return sb.toString(); - } - - public static String getCommandName(final String commandName) { - return AT + commandName; - } -} +package org.semanticweb.rulewerk.core.model.implementation; + +import java.io.IOException; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.Writer; +import java.util.function.Function; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; + +/** + * Objects of this class are used to create string representations of syntactic + * objects. + * + * @see RuleWerk + * rule syntax + * + * @author Markus Kroetzsch + * + */ +public class Serializer { + + public static final String STATEMENT_END = " ."; + + /** + * Default IRI serializer that can be used if no abbreviations (prefixes, base, + * etc.) are used. + */ + public static final Function identityIriSerializer = new Function() { + @Override + public String apply(String iri) { + return iri.contains(":") ? "<" + iri + ">" : iri; + } + }; + + final Writer writer; + final Function iriTransformer; + final SerializerTermVisitor serializerTermVisitor = new SerializerTermVisitor(); + final SerializerStatementVisitor serializerStatementVisitor = new SerializerStatementVisitor(); + + /** + * Runtime exception used to report errors that occurred in visitors that do not + * declare checked exceptions. + * + * @author Markus Kroetzsch + * + */ + private class RuntimeIoException extends RuntimeException { + private static final long serialVersionUID = 1L; + final IOException cause; + + public RuntimeIoException(IOException cause) { + super(cause); + this.cause = cause; + } + + public IOException getIoException() { + return cause; + } + } + + /** + * Auxiliary class to visit {@link Term} objects for writing. 
+ * + * @author Markus Kroetzsch + * + */ + private class SerializerTermVisitor implements TermVisitor { + + @Override + public Void visit(AbstractConstant term) { + try { + Serializer.this.writeAbstractConstant(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(DatatypeConstant term) { + try { + Serializer.this.writeDatatypeConstant(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(LanguageStringConstant term) { + try { + Serializer.this.writeLanguageStringConstant(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(UniversalVariable term) { + try { + Serializer.this.writeUniversalVariable(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(ExistentialVariable term) { + try { + Serializer.this.writeExistentialVariable(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(NamedNull term) { + try { + Serializer.this.writeNamedNull(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + } + + /** + * Auxiliary class to visit {@link Statement} objects for writing. + * + * @author Markus Kroetzsch + * + */ + private class SerializerStatementVisitor implements StatementVisitor { + + @Override + public Void visit(Fact statement) { + try { + Serializer.this.writeFact(statement); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(Rule statement) { + try { + Serializer.this.writeRule(statement); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(DataSourceDeclaration statement) { + try { + Serializer.this.writeDataSourceDeclaration(statement); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + } + + /** + * Construct a serializer that uses a specific function to serialize IRIs. + * + * @param writer the object used to write serializations + * @param iriTransformer a function used to abbreviate IRIs, e.g., if namespace + * prefixes were declared + */ + public Serializer(final Writer writer, final Function iriTransformer) { + this.writer = writer; + this.iriTransformer = iriTransformer; + } + + /** + * Construct a serializer that serializes IRIs without any form of + * transformation or abbreviation. + * + * @param writer the object used to write serializations + */ + public Serializer(final Writer writer) { + this(writer, identityIriSerializer); + } + + /** + * Construct a serializer that uses the given {@link PrefixDeclarationRegistry} + * to abbreviate IRIs. + * + * @param writer the object used to write serializations + * @param prefixDeclarationRegistry the object used to abbreviate IRIs + */ + public Serializer(final Writer writer, PrefixDeclarationRegistry prefixDeclarationRegistry) { + this(writer, (string) -> { + return prefixDeclarationRegistry.unresolveAbsoluteIri(string, true); + }); + } + + /** + * Writes a serialization of the given {@link Statement}. 
+ * + * @param term a {@link Statement} + * @throws IOException + */ + public void writeStatement(Statement statement) throws IOException { + try { + statement.accept(this.serializerStatementVisitor); + } catch (Serializer.RuntimeIoException e) { + throw e.getIoException(); + } + } + + /** + * Writes a serialization of the given {@link Fact}. + * + * @param fact a {@link Fact} + * @throws IOException + */ + public void writeFact(Fact fact) throws IOException { + writeLiteral(fact); + writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link Rule}. + * + * @param rule a {@link Rule} + * @throws IOException + */ + public void writeRule(Rule rule) throws IOException { + writeLiteralConjunction(rule.getHead()); + writer.write(" :- "); + writeLiteralConjunction(rule.getBody()); + writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link DataSourceDeclaration}. + * + * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @throws IOException + */ + public void writeDataSourceDeclaration(DataSourceDeclaration dataSourceDeclaration) throws IOException { + writer.write("@source "); + writePredicate(dataSourceDeclaration.getPredicate()); + writer.write(": "); + writeLiteral(dataSourceDeclaration.getDataSource().getDeclarationFact()); + writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link Literal}. + * + * @param literal a {@link Literal} + * @throws IOException + */ + public void writeLiteral(Literal literal) throws IOException { + writer.write(getIri(literal.getPredicate().getName())); + writer.write("("); + + boolean first = true; + for (final Term term : literal.getArguments()) { + if (first) { + first = false; + } else { + writer.write(", "); + } + writeTerm(term); + } + + writer.write(")"); + } + + /** + * Writes a serialization of the given {@link Conjunction} of {@link Literal} + * objects. + * + * @param literals a {@link Conjunction} + * @throws IOException + */ + public void writeLiteralConjunction(final Conjunction literals) throws IOException { + boolean first = true; + for (final Literal literal : literals.getLiterals()) { + if (first) { + first = false; + } else { + writer.write(", "); + } + writeLiteral(literal); + } + } + + /** + * Writes a serialization of the given {@link Predicate}. This serialization + * specifies the name and arity of the predicate. + * + * @param predicate a {@link Predicate} + * @throws IOException + */ + public void writePredicate(Predicate predicate) throws IOException { + writer.write(getIri(predicate.getName())); + writer.write("["); + writer.write(String.valueOf(predicate.getArity())); + writer.write("]"); + } + + /** + * Writes a serialization of the given {@link Term}. + * + * @param term a {@link Term} + * @throws IOException + */ + public void writeTerm(Term term) throws IOException { + try { + term.accept(this.serializerTermVisitor); + } catch (Serializer.RuntimeIoException e) { + throw e.getIoException(); + } + } + + /** + * Writes a serialization of the given {@link AbstractConstant}. + * + * @param abstractConstant a {@link AbstractConstant} + * @throws IOException + */ + public void writeAbstractConstant(AbstractConstant abstractConstant) throws IOException { + writer.write(getIri(abstractConstant.getName())); + } + + /** + * Writes a serialization of the given {@link DatatypeConstant}. 
+ * + * @param datatypeConstant a {@link DatatypeConstant} + * @throws IOException + */ + public void writeDatatypeConstant(DatatypeConstant datatypeConstant) throws IOException { + if (PrefixDeclarationRegistry.XSD_STRING.equals(datatypeConstant.getDatatype())) { + writer.write(getQuotedString(datatypeConstant.getLexicalValue())); + } else if (PrefixDeclarationRegistry.XSD_INTEGER.equals(datatypeConstant.getDatatype())) { + writer.write(datatypeConstant.getLexicalValue()); + } else { + writer.write(getQuotedString(datatypeConstant.getLexicalValue())); + writer.write("^^"); + writer.write(getIri(datatypeConstant.getDatatype())); + } + } + + /** + * Writes a serialization of the given {@link UniversalVariable}. + * + * @param universalVariable a {@link UniversalVariable} + * @throws IOException + */ + public void writeUniversalVariable(UniversalVariable universalVariable) throws IOException { + writer.write("?"); + writer.write(universalVariable.getName()); + } + + /** + * Writes a serialization of the given {@link ExistentialVariable}. + * + * @param existentialVariable a {@link ExistentialVariable} + * @throws IOException + */ + public void writeExistentialVariable(ExistentialVariable existentialVariable) throws IOException { + writer.write("!"); + writer.write(existentialVariable.getName()); + } + + /** + * Writes a serialization of the given {@link NamedNull}. + * + * @param namedNull a {@link NamedNull} + * @throws IOException + */ + public void writeNamedNull(NamedNull namedNull) throws IOException { + writer.write("_:"); + writer.write(namedNull.getName()); + } + + /** + * Writes a serialization of the given {@link LanguageStringConstant}. + * + * @param languageStringConstant a {@link LanguageStringConstant} + * @throws IOException + */ + public void writeLanguageStringConstant(LanguageStringConstant languageStringConstant) throws IOException { + writer.write(getQuotedString(languageStringConstant.getString())); + writer.write("@"); + writer.write(languageStringConstant.getLanguageTag()); + } + + /** + * Escapes (with {@code \}) special character occurrences in given + * {@code string}. The special characters are: + *
+ * <ul>
+ * <li>{@code \}</li>
+ * <li>{@code "}</li>
+ * <li>{@code \t}</li>
+ * <li>{@code \b}</li>
+ * <li>{@code \n}</li>
+ * <li>{@code \r}</li>
+ * <li>{@code \f}</li>
+ * </ul>
    + * + * @param string + * @return an escaped string + */ + private String getQuotedString(final String string) { + return "\"" + string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") + .replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f") + "\""; + } + + private String getIri(final String string) { + return iriTransformer.apply(string); + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index b9f1d3ee9..e81407f7b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -53,7 +53,7 @@ import org.semanticweb.rulewerk.core.model.api.Statement; import org.semanticweb.rulewerk.core.model.api.StatementVisitor; import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * A knowledge base with rules, facts, and declarations for loading data from @@ -577,7 +577,7 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE * declared prefixes, or {@code iri} if no suitable prefix is declared. */ public String unresolveAbsoluteIri(String iri) { - return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri); + return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri, false); } /** @@ -589,20 +589,20 @@ public String unresolveAbsoluteIri(String iri) { * stream */ public void writeKnowledgeBase(Writer writer) throws IOException { - writer.write(Serializer.getBaseAndPrefixDeclarations(this)); + writer.write(OldSerializer.getBaseAndPrefixDeclarations(this)); for (DataSourceDeclaration dataSource : this.getDataSourceDeclarations()) { - writer.write(Serializer.getString(dataSource)); + writer.write(OldSerializer.getString(dataSource)); writer.write('\n'); } for (Fact fact : this.getFacts()) { - writer.write(Serializer.getFactString(fact)); + writer.write(OldSerializer.getFactString(fact)); writer.write('\n'); } for (Rule rule : this.getRules()) { - writer.write(Serializer.getString(rule)); + writer.write(OldSerializer.getString(rule)); writer.write('\n'); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 276398c62..ff9a2d8a0 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -42,7 +42,7 @@ import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface that exposes the (existential) rule reasoning capabilities of a @@ -133,9 +133,9 @@ default Correctness unsafeForEachInference(BiConsumer> act */ default Correctness writeInferences(OutputStream stream) throws IOException { final KnowledgeBase knowledgeBase = getKnowledgeBase(); - stream.write(Serializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); + 
stream.write(OldSerializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); return forEachInference((predicate, termList) -> stream - .write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); + .write(OldSerializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); } /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index 7f72f25de..c3347b47a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -23,7 +23,8 @@ import java.io.IOException; import java.util.Arrays; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * An {@code CsvFileDataSource} stores facts in the CSV format inside a file of @@ -74,11 +75,16 @@ public String toString() { @Override public String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); } + + @Override + String getDeclarationPredicateName() { + return "load-csv"; + } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index 46ec295de..68cbf2f68 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -27,6 +27,10 @@ import java.util.stream.StreamSupport; import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; /** * A {@code FileDataSource} is an abstract implementation of a storage for fact @@ -113,4 +117,19 @@ public boolean equals(final Object obj) { return this.file.equals(other.getFile()); } + @Override + public Fact getDeclarationFact() { + Predicate predicate = Expressions.makePredicate(getDeclarationPredicateName(), 1); + return Expressions.makeFact(predicate, + Expressions.makeDatatypeConstant(getPath(), PrefixDeclarationRegistry.XSD_STRING)); + } + + /** + * Returns the name of the predicate that is used to define a declaration of + * this data source. 
+ * + * @return + */ + abstract String getDeclarationPredicateName(); + } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index 57231434a..f7f933253 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -73,7 +73,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return "QueryResult [terms=" + this.terms + "]"; + return this.terms.toString(); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index 9df6c5d26..3dc4c9d2b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -24,7 +24,7 @@ import java.util.Arrays; import java.util.Optional; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * An {@code RdfFileDataSource} stores facts in the RDF N-Triples format inside @@ -73,7 +73,7 @@ public String toString() { @Override public String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override @@ -85,4 +85,9 @@ public Optional getRequiredArity() { public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); } + + @Override + String getDeclarationPredicateName() { + return "load-rdf"; + } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index 99f8548e0..b89e22068 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -26,8 +26,13 @@ import java.util.Optional; import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a @@ -38,8 +43,6 @@ */ public class SparqlQueryResultDataSource implements ReasonerDataSource { - - private final URL endpoint; private final String queryVariables; private final String queryBody; @@ -82,8 +85,8 @@ public SparqlQueryResultDataSource(final URL endpoint, final String queryVariabl */ // TODO add examples to javadoc // TODO add illegal argument exceptions to javadoc - public 
SparqlQueryResultDataSource(final URL endpoint, - final LinkedHashSet queryVariables, final String queryBody) { + public SparqlQueryResultDataSource(final URL endpoint, final LinkedHashSet queryVariables, + final String queryBody) { Validate.notNull(endpoint, "Endpoint cannot be null."); Validate.notNull(queryVariables, "Query variables ordered set cannot be null."); Validate.noNullElements(queryVariables, "Query variables cannot be null or contain null elements."); @@ -159,7 +162,7 @@ public String toString() { @Override public String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override @@ -167,4 +170,14 @@ public void accept(DataSourceConfigurationVisitor visitor) { visitor.visit(this); } + @Override + public Fact getDeclarationFact() { + Predicate predicate = Expressions.makePredicate("sparql", 3); + Term endpointTerm = Expressions.makeAbstractConstant(getEndpoint().toString()); + Term variablesTerm = Expressions.makeDatatypeConstant(getQueryVariables(), + PrefixDeclarationRegistry.XSD_STRING); + Term patternTerm = Expressions.makeDatatypeConstant(getQueryBody(), PrefixDeclarationRegistry.XSD_STRING); + return Expressions.makeFact(predicate, endpointTerm, variablesTerm, patternTerm); + } + } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index dfd658443..3c415a190 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -33,7 +33,7 @@ import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -89,7 +89,7 @@ public void toString_CsvFileDataSource() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); - final String expectedFilePath = Serializer.getString(relativeDirName + fileName); + final String expectedFilePath = OldSerializer.getString(relativeDirName + fileName); assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } @@ -114,7 +114,7 @@ public void toString_RdfFileDataSource_relativePath() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); - final String expectedFilePath = Serializer.getString(relativeDirName + fileName); + final String expectedFilePath = OldSerializer.getString(relativeDirName + fileName); assertEquals("@source q[1]: load-rdf(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 
9bf049824..6705ba25d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -91,7 +91,7 @@ public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws Pref String prefix = "eg:"; prefixDeclarations.setPrefixIri(prefix, BASE); String resolved = BASE + RELATIVE; - String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved); + String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved, false); assertEquals(resolved, prefixDeclarations.resolvePrefixedName(unresolved)); } @@ -133,49 +133,49 @@ public void mergePrefixDeclarations_conflictingPrefixName_renamesConflictingPref @Test public void unresolveAbsoluteIri_default_identical() { - assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); + assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE, false)); } @Test public void unresolveAbsoluteIri_declaredPrefix_succeeds() { - assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); + assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); prefixDeclarations.setPrefixIri("eg:", BASE); - assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); + assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); } @Test public void unresolveAbsoluteIri_unrelatedPrefix_identical() { prefixDeclarations.setPrefixIri("eg:", UNRELATED); - assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); + assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); } @Test public void unresolveAbsoluteIri_unrelatedAndRelatedPrefixes_succeeds() { prefixDeclarations.setPrefixIri("ex:", UNRELATED); prefixDeclarations.setPrefixIri("eg:", BASE); - assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); + assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); } @Test public void unresolveAbsoluteIri_multipleMatchingPrefixes_longestMatchWins() { prefixDeclarations.setPrefixIri("eg:", BASE); prefixDeclarations.setPrefixIri("ex:", MORE_SPECIFIC); - assertEquals("ex:" + RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); + assertEquals("ex:" + RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE, false)); prefixDeclarations.setPrefixIri("er:", EVEN_MORE_SPECIFIC); - assertEquals("er:test", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); + assertEquals("er:test", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE, false)); } @Test public void unresolveAbsoluteIri_exactPrefixMatch_identical() { prefixDeclarations.setPrefixIri("eg:", BASE); - assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); + assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE, false)); } @Test public void unresolveAbsoluteIri_baseIsMoreSpecific_baseWins() { prefixDeclarations.setBaseIri(MORE_SPECIFIC); prefixDeclarations.setPrefixIri("eg:", BASE); - assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); + assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE, false)); } @Test @@ -184,7 +184,7 @@ public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws Pref 
prefixDeclarations.setPrefixIri(prefix, BASE); String unresolved = prefix + RELATIVE; String resolved = prefixDeclarations.resolvePrefixedName(unresolved); - assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved)); + assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved, false)); } @Test @@ -194,7 +194,7 @@ public void unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative( PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setBaseIri(BASE); this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); - assertEquals(relativeIri, this.prefixDeclarations.unresolveAbsoluteIri(relativeIri)); + assertEquals(relativeIri, this.prefixDeclarations.unresolveAbsoluteIri(relativeIri, false)); } @Test @@ -205,7 +205,7 @@ public void unresolveAbsoluteIri_absoluteIriMergedOntoEmptyBase_staysAbsolute() prefixDeclarations.setBaseIri(BASE); String absoluteIri = prefixDeclarations.absolutizeIri(RELATIVE); this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); - String resolvedIri = this.prefixDeclarations.unresolveAbsoluteIri(absoluteIri); + String resolvedIri = this.prefixDeclarations.unresolveAbsoluteIri(absoluteIri, false); assertNotEquals(RELATIVE, resolvedIri); assertEquals("rw_gen0:" + RELATIVE, resolvedIri); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java new file mode 100644 index 000000000..fbcde3f04 --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -0,0 +1,214 @@ +package org.semanticweb.rulewerk.core.model; + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; +import java.net.URL; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +public class SerializerTest { + + static Term abstractConstant = Expressions.makeAbstractConstant("http://example.org/test"); + static Term abstractConstantShort = Expressions.makeAbstractConstant("c"); + static Term existentialVariable = Expressions.makeExistentialVariable("X"); + static Term universalVariable = Expressions.makeUniversalVariable("X"); + static Term languageStringConstant = Expressions.makeLanguageStringConstant("abc", 
"de"); + static Term datatypeConstantGeneral = Expressions.makeDatatypeConstant("abc", "http://example.org/test"); + static Term datatypeConstantString = Expressions.makeDatatypeConstant("abc", PrefixDeclarationRegistry.XSD_STRING); + static Term datatypeConstantInteger = Expressions.makeDatatypeConstant("123", + PrefixDeclarationRegistry.XSD_INTEGER); + static Term namedNull = new NamedNullImpl("n1"); + + static Predicate p1 = Expressions.makePredicate("p1", 1); + static Predicate p2 = Expressions.makePredicate("p2", 2); + static Predicate p3 = Expressions.makePredicate("p3", 3); + + static Fact fact = Expressions.makeFact(p1, abstractConstantShort); + static PositiveLiteral l1 = Expressions.makePositiveLiteral(p1, universalVariable); + static Literal l2 = Expressions.makePositiveLiteral(p2, universalVariable, abstractConstantShort); + static Rule rule = Expressions.makeRule(l1, l2, fact); + + StringWriter writer; + Serializer serializer; + + @Before + public void init() { + writer = new StringWriter(); + serializer = new Serializer(writer); + } + + private Serializer getThrowingSerializer() throws IOException { + Writer writerMock = Mockito.mock(Writer.class); + Mockito.doThrow(IOException.class).when(writerMock).write(Mockito.anyString()); + return new Serializer(writerMock); + } + + @Test + public void serializeDatatypeConstant() throws IOException { + serializer.writeTerm(datatypeConstantGeneral); + assertEquals("\"abc\"^^", writer.toString()); + } + + @Test + public void serializeDatatypeConstantString() throws IOException { + serializer.writeTerm(datatypeConstantString); + assertEquals("\"abc\"", writer.toString()); + } + + @Test + public void serializeDatatypeConstantInteger() throws IOException { + serializer.writeTerm(datatypeConstantInteger); + assertEquals("123", writer.toString()); + } + + @Test + public void serializeExistentialVariable() throws IOException { + serializer.writeTerm(existentialVariable); + assertEquals("!X", writer.toString()); + } + + @Test + public void serializeUniversalVariable() throws IOException { + serializer.writeTerm(universalVariable); + assertEquals("?X", writer.toString()); + } + + @Test + public void serializeLanguageStringConstant() throws IOException { + serializer.writeTerm(languageStringConstant); + assertEquals("\"abc\"@de", writer.toString()); + } + + @Test + public void serializeNamedNull() throws IOException { + serializer.writeTerm(namedNull); + assertEquals("_:n1", writer.toString()); + } + + @Test + public void serializeFact() throws IOException { + serializer.writeStatement(fact); + assertEquals("p1(c) .", writer.toString()); + } + + @Test + public void serializeRule() throws IOException { + serializer.writeStatement(rule); + assertEquals("p1(?X) :- p2(?X, c), p1(c) .", writer.toString()); + } + + @Test + public void serializeCsvDataSourceDeclaration() throws IOException { + DataSourceDeclaration csvSourceDecl = new DataSourceDeclarationImpl(p1, new CsvFileDataSource("test.csv")); + serializer.writeStatement(csvSourceDecl); + assertEquals("@source p1[1]: load-csv(\"test.csv\") .", writer.toString()); + } + + @Test + public void serializeRdfDataSourceDeclaration() throws IOException { + DataSourceDeclaration rdfSourceDecl = new DataSourceDeclarationImpl(p3, new RdfFileDataSource("test.nt")); + serializer.writeStatement(rdfSourceDecl); + assertEquals("@source p3[3]: load-rdf(\"test.nt\") .", writer.toString()); + } + + @Test + public void serializeSparqlDataSourceDeclaration() throws IOException { + DataSourceDeclaration 
sparqlSourceDecl = new DataSourceDeclarationImpl(p1, + new SparqlQueryResultDataSource(new URL("http://example.org"), "var", "?var ")); + serializer.writeStatement(sparqlSourceDecl); + assertEquals("@source p1[1]: sparql(, \"var\", \"?var \") .", writer.toString()); + } + + @Test + public void serializeAbstractConstantWithPrefixDeclarations() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + prefixes.setPrefixIri("eg:", "http://example.org/"); + Serializer prefSerializer = new Serializer(writer, prefixes); + + prefSerializer.writeTerm(abstractConstant); + assertEquals("eg:test", writer.toString()); + } + + @Test + public void serializeDatatypeConstantWithPrefixDeclarations() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + prefixes.setPrefixIri("eg:", "http://example.org/"); + Serializer prefSerializer = new Serializer(writer, prefixes); + + prefSerializer.writeTerm(datatypeConstantGeneral); + assertEquals("\"abc\"^^eg:test", writer.toString()); + } + + @Test + public void createThrowingSerializer_succeeds() throws IOException { + getThrowingSerializer(); + } + + @Test(expected = IOException.class) + public void serializeAbstractConstant_fails() throws IOException { + getThrowingSerializer().writeTerm(abstractConstant); + } + + @Test(expected = IOException.class) + public void serializeDatatypeConstant_fails() throws IOException { + getThrowingSerializer().writeTerm(datatypeConstantGeneral); + } + + @Test(expected = IOException.class) + public void serializeExistentialVariable_fails() throws IOException { + getThrowingSerializer().writeTerm(existentialVariable); + } + + @Test(expected = IOException.class) + public void serializeUniversalVariable_fails() throws IOException { + getThrowingSerializer().writeTerm(universalVariable); + } + + @Test(expected = IOException.class) + public void serializeLanguageStringConstant_fails() throws IOException { + getThrowingSerializer().writeTerm(languageStringConstant); + } + + @Test(expected = IOException.class) + public void serializeNamedNull_fails() throws IOException { + getThrowingSerializer().writeTerm(namedNull); + } + + @Test(expected = IOException.class) + public void serializeFact_fails() throws IOException { + getThrowingSerializer().writeStatement(fact); + } + + @Test(expected = IOException.class) + public void serializeRule_fails() throws IOException { + getThrowingSerializer().writeStatement(rule); + } + + @Test(expected = IOException.class) + public void serializeDataSourceDeclaration_fails() throws IOException { + DataSourceDeclaration csvSourceDecl = new DataSourceDeclarationImpl(p1, new CsvFileDataSource("test.csv")); + getThrowingSerializer().writeStatement(csvSourceDecl); + } + +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java index cf45c534a..ca4561dac 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.parser.datasources.CsvFileDataSourceDeclarationHandler; import 
org.semanticweb.rulewerk.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; @@ -43,9 +43,9 @@ public DefaultParserConfiguration() { * Register built-in data sources (currently CSV, RDF, SPARQL). */ private void registerDefaultDataSources() { - registerDataSource(Serializer.CSV_FILE_DATA_SOURCE, new CsvFileDataSourceDeclarationHandler()); - registerDataSource(Serializer.RDF_FILE_DATA_SOURCE, new RdfFileDataSourceDeclarationHandler()); - registerDataSource(Serializer.SPARQL_QUERY_RESULT_DATA_SOURCE, + registerDataSource(OldSerializer.CSV_FILE_DATA_SOURCE, new CsvFileDataSourceDeclarationHandler()); + registerDataSource(OldSerializer.RDF_FILE_DATA_SOURCE, new RdfFileDataSourceDeclarationHandler()); + registerDataSource(OldSerializer.SPARQL_QUERY_RESULT_DATA_SOURCE, new SparqlQueryResultDataSourceDeclarationHandler()); } From 3f0e349dab984124f9b76e29001f2b78669d5e6e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 23:13:44 +0200 Subject: [PATCH 0696/1003] Update renaming of old serializer --- .../reasoner/vlog/VLogToModelConverter.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index f313fdefd..3b7876dda 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -30,7 +30,7 @@ import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; /** @@ -103,19 +103,19 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { */ static Constant toConstant(String vLogConstantName) { final Constant constant; - if (vLogConstantName.charAt(0) == Serializer.LESS_THAN - && vLogConstantName.charAt(vLogConstantName.length() - 1) == Serializer.MORE_THAN) { + if (vLogConstantName.charAt(0) == OldSerializer.LESS_THAN + && vLogConstantName.charAt(vLogConstantName.length() - 1) == OldSerializer.MORE_THAN) { // strip <> off of IRIs constant = new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1)); - } else if (vLogConstantName.charAt(0) == Serializer.QUOTE) { - if (vLogConstantName.charAt(vLogConstantName.length() - 1) == Serializer.MORE_THAN) { - final int startTypeIdx = vLogConstantName.lastIndexOf(Serializer.LESS_THAN, + } else if (vLogConstantName.charAt(0) == OldSerializer.QUOTE) { + if (vLogConstantName.charAt(vLogConstantName.length() - 1) == OldSerializer.MORE_THAN) { + final int startTypeIdx = vLogConstantName.lastIndexOf(OldSerializer.LESS_THAN, vLogConstantName.length() - 2); final String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1); final String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3); constant = new DatatypeConstantImpl(lexicalValue, datatype); } else { - final int startTypeIdx = vLogConstantName.lastIndexOf(Serializer.AT, 
vLogConstantName.length() - 2); + final int startTypeIdx = vLogConstantName.lastIndexOf(OldSerializer.AT, vLogConstantName.length() - 2); if (startTypeIdx > -1) { final String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); final String string = vLogConstantName.substring(1, startTypeIdx - 1); From cecd42828267cdf2a715abcb71b13785783d1390 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 23:13:51 +0200 Subject: [PATCH 0697/1003] license header --- .../rulewerk/core/model/SerializerTest.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index fbcde3f04..d8a4fef62 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.core.model; +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.*; import java.io.IOException; From ce3bdee5f9de749b88173a7689f9f9cf60b64377 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 23:50:27 +0200 Subject: [PATCH 0698/1003] serialize prefixes correctly --- .../AbstractPrefixDeclarationRegistry.java | 18 +++++++++-- .../core/model/implementation/Serializer.java | 31 +++++++++++++++++++ .../rulewerk/core/reasoner/KnowledgeBase.java | 14 +++++---- .../rulewerk/core/model/SerializerTest.java | 13 +++++++- .../core/reasoner/KnowledgeBaseTest.java | 10 ++++-- 5 files changed, 74 insertions(+), 12 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index d1565ea81..31124e5d5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -28,6 +28,7 @@ import java.util.Map.Entry; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; /** @@ -98,10 +99,23 @@ public String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarat @Override public String unresolveAbsoluteIri(String iri, boolean addIriBrackets) { - String shortestIri = addIriBrackets ? 
"<" + iri + ">" : iri; + String shortestIri; + if (addIriBrackets) { + if (!iri.contains(":")) { + shortestIri = iri; + if (!PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri)) { + throw new RulewerkRuntimeException("Relative IRIs cannot be serialized when a base is declared."); + } + } else { + shortestIri = "<" + iri + ">"; + } + } else { + shortestIri = iri; + } + String baseIri = getBaseIri(); - if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.length() > baseIri.length() + if (!PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri) && iri.length() > baseIri.length() && iri.startsWith(baseIri)) { String shorterIri = iri.substring(baseIri.length()); // Only allow very simple names of this form, to avoid confusion, e.g., with diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 23d4c238c..1cdd5d3df 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -23,6 +23,8 @@ */ import java.io.Writer; +import java.util.Iterator; +import java.util.Map.Entry; import java.util.function.Function; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; @@ -423,6 +425,35 @@ public void writeNamedNull(NamedNull namedNull) throws IOException { writer.write(namedNull.getName()); } + /** + * Writes a serialization of the given {@link PrefixDeclarationRegistry}. + * + * @param prefixDeclarationRegistry a {@link PrefixDeclarationRegistry} + * @throws IOException + */ + public void writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) throws IOException { + final String baseIri = prefixDeclarationRegistry.getBaseIri(); + if (!PrefixDeclarationRegistry.EMPTY_BASE.contentEquals(baseIri)) { + writer.write("@base <"); + writer.write(baseIri); + writer.write(">"); + writer.write(STATEMENT_END); + writer.write("\n"); + } + + Iterator> prefixIterator = prefixDeclarationRegistry.iterator(); + while (prefixIterator.hasNext()) { + Entry entry = prefixIterator.next(); + writer.write("@prefix "); + writer.write(entry.getKey()); + writer.write(" <"); + writer.write(entry.getValue()); + writer.write(">"); + writer.write(STATEMENT_END); + writer.write("\n"); + } + } + /** * Writes a serialization of the given {@link LanguageStringConstant}. 
* diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index e81407f7b..698683e22 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -53,7 +53,7 @@ import org.semanticweb.rulewerk.core.model.api.Statement; import org.semanticweb.rulewerk.core.model.api.StatementVisitor; import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * A knowledge base with rules, facts, and declarations for loading data from @@ -589,20 +589,22 @@ public String unresolveAbsoluteIri(String iri) { * stream */ public void writeKnowledgeBase(Writer writer) throws IOException { - writer.write(OldSerializer.getBaseAndPrefixDeclarations(this)); + Serializer serializer = new Serializer(writer, prefixDeclarationRegistry); - for (DataSourceDeclaration dataSource : this.getDataSourceDeclarations()) { - writer.write(OldSerializer.getString(dataSource)); + serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); + + for (DataSourceDeclaration dataSourceDeclaration : this.getDataSourceDeclarations()) { + serializer.writeDataSourceDeclaration(dataSourceDeclaration); writer.write('\n'); } for (Fact fact : this.getFacts()) { - writer.write(OldSerializer.getFactString(fact)); + serializer.writeFact(fact); writer.write('\n'); } for (Rule rule : this.getRules()) { - writer.write(OldSerializer.getString(rule)); + serializer.writeRule(rule); writer.write('\n'); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index d8a4fef62..6a41d37fc 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -180,11 +180,22 @@ public void serializeDatatypeConstantWithPrefixDeclarations() throws IOException assertEquals("\"abc\"^^eg:test", writer.toString()); } + @Test + public void serializePrefixDeclarations() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + prefixes.setBaseIri("http://example.org/base"); + prefixes.setPrefixIri("eg:", "http://example.org/"); + Serializer prefSerializer = new Serializer(writer, prefixes); + + prefSerializer.writePrefixDeclarationRegistry(prefixes); + assertEquals("@base .\n@prefix eg: .\n", writer.toString()); + } + @Test public void createThrowingSerializer_succeeds() throws IOException { getThrowingSerializer(); } - + @Test(expected = IOException.class) public void serializeAbstractConstant_fails() throws IOException { getThrowingSerializer().writeTerm(abstractConstant); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index 91a350b65..ea3254874 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -31,6 +31,7 @@ import 
org.junit.Test; import org.mockito.internal.util.collections.Sets; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Rule; @@ -137,15 +138,18 @@ public void writeKnowledgeBase_justFacts_succeeds() throws IOException { assertEquals("P(c) .\nP(d) .\nQ(c) .\n", writer.toString()); } - @Test - public void writeKnowledgeBase_withBase_succeeds() throws IOException { + @Test(expected = RulewerkRuntimeException.class) + public void writeKnowledgeBase_withBase_fails() throws IOException { String baseIri = "https://example.org/"; MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setBaseIri(baseIri); this.kb.mergePrefixDeclarations(prefixDeclarations); StringWriter writer = new StringWriter(); this.kb.writeKnowledgeBase(writer); - assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n", writer.toString()); + //// This would be incorrect, since parsing this would lead to another KB + //// that uses IRIs like : + // assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n", + // writer.toString()); } @Test From 54a4b931cbc473afd9599b32f74bf000eeaf7b54 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 23:58:50 +0200 Subject: [PATCH 0699/1003] implement missing method --- .../rulewerk/reasoner/vlog/VLogInMemoryDataSource.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java index c6952d09f..4952cfb84 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java @@ -23,6 +23,7 @@ import java.util.Arrays; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor; /** @@ -88,4 +89,9 @@ public String getSyntacticRepresentation() { public void accept(DataSourceConfigurationVisitor visitor) { visitor.visit(this); } + + @Override + public Fact getDeclarationFact() { + throw new UnsupportedOperationException("VLogInMemoryDataSource is cannot be serialized."); + } } From 0c776df0267001cf0882c4228186e536ec8eae80 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 00:16:11 +0200 Subject: [PATCH 0700/1003] support serialization to string --- .../core/model/implementation/Serializer.java | 27 +++++++++++++++++++ .../rulewerk/core/model/SerializerTest.java | 1 + 2 files changed, 28 insertions(+) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 1cdd5d3df..a624590be 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -1,6 +1,7 @@ package org.semanticweb.rulewerk.core.model.implementation; import java.io.IOException; +import 
java.io.StringWriter; /*- * #%L @@ -71,6 +72,14 @@ public String apply(String iri) { } }; + /** + * Interface for a method that writes something to a writer. + */ + @FunctionalInterface + public interface SerializationWriter { + void write(final Serializer serializer) throws IOException; + } + final Writer writer; final Function iriTransformer; final SerializerTermVisitor serializerTermVisitor = new SerializerTermVisitor(); @@ -466,6 +475,24 @@ public void writeLanguageStringConstant(LanguageStringConstant languageStringCon writer.write(languageStringConstant.getLanguageTag()); } + /** + * Convenience method for obtaining serializations as Java strings. + * + * @param writeAction a function that accepts a {@link Serializer} and produces + * a string + * @return serialization string + */ + public static String getSerialization(SerializationWriter writeAction) { + final StringWriter stringWriter = new StringWriter(); + final Serializer serializer = new Serializer(stringWriter); + try { + writeAction.write(serializer); + } catch (IOException e) { + throw new RuntimeException("StringWriter should never throw an IOException."); + } + return stringWriter.toString(); + } + /** * Escapes (with {@code \}) special character occurrences in given * {@code string}. The special characters are: diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index 6a41d37fc..871fec96f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -130,6 +130,7 @@ public void serializeNamedNull() throws IOException { public void serializeFact() throws IOException { serializer.writeStatement(fact); assertEquals("p1(c) .", writer.toString()); + assertEquals("p1(c) .", Serializer.getSerialization(serializer -> serializer.writeFact(fact))); } @Test From 19b00b856c291cab68a72a7f0946379144cfba02 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 01:37:59 +0200 Subject: [PATCH 0701/1003] use new Serializer --- .../core/model/api/AbstractConstant.java | 13 - .../rulewerk/core/model/api/Command.java | 3 +- .../rulewerk/core/model/api/Conjunction.java | 7 - .../core/model/api/DataSourceDeclaration.java | 6 - .../core/model/api/DatatypeConstant.java | 13 - .../rulewerk/core/model/api/Entity.java | 6 - .../core/model/api/ExistentialVariable.java | 6 - .../rulewerk/core/model/api/Fact.java | 7 - .../model/api/LanguageStringConstant.java | 13 - .../rulewerk/core/model/api/Literal.java | 7 - .../rulewerk/core/model/api/NamedNull.java | 13 - .../rulewerk/core/model/api/Predicate.java | 7 - .../rulewerk/core/model/api/Rule.java | 7 - .../rulewerk/core/model/api/Term.java | 11 - .../core/model/api/UniversalVariable.java | 6 - .../rulewerk/core/model/api/Variable.java | 6 - .../implementation/AbstractConstantImpl.java | 2 +- .../implementation/AbstractLiteralImpl.java | 2 +- .../AbstractPrefixDeclarationRegistry.java | 11 +- .../model/implementation/ConjunctionImpl.java | 2 +- .../DataSourceDeclarationImpl.java | 2 +- .../implementation/DatatypeConstantImpl.java | 4 +- .../ExistentialVariableImpl.java | 2 +- .../core/model/implementation/FactImpl.java | 2 +- .../LanguageStringConstantImpl.java | 4 +- .../model/implementation/NamedNullImpl.java | 2 +- .../model/implementation/OldSerializer.java | 560 ++++++++++++++++++ .../model/implementation/PredicateImpl.java | 2 +- 
.../core/model/implementation/RuleImpl.java | 2 +- .../core/model/implementation/Serializer.java | 26 +- .../implementation/UniversalVariableImpl.java | 2 +- .../rulewerk/core/reasoner/Reasoner.java | 33 +- .../implementation/CsvFileDataSource.java | 15 +- .../implementation/RdfFileDataSource.java | 14 +- .../SparqlQueryResultDataSource.java | 13 +- .../core/model/DataSourceDeclarationTest.java | 5 +- .../rulewerk/core/model/SerializerTest.java | 19 + .../rulewerk/core/model/TermImplTest.java | 6 +- .../parser/DefaultParserConfiguration.java | 11 +- .../rulewerk/parser/EntityTest.java | 20 +- .../rulewerk/parser/RuleParserTest.java | 6 +- .../reasoner/vlog/VLogInMemoryDataSource.java | 2 +- .../reasoner/vlog/VLogToModelConverter.java | 31 +- .../vlog/VLogReasonerWriteInferencesTest.java | 22 +- 44 files changed, 713 insertions(+), 240 deletions(-) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java index 70592a3e6..5d43e7cf3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java @@ -20,10 +20,6 @@ * #L% */ -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for abstract constants, i.e. for constants that represent an * abstract domain element (in contrast to a specific value of a concrete @@ -38,13 +34,4 @@ default TermType getType() { return TermType.ABSTRACT_CONSTANT; } - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return OldSerializer.getString(this, iriTransformer); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java index fce5aa32a..1a5dc295b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -64,7 +64,6 @@ public List getArguments() { return arguments; } - @Override public String getSyntacticRepresentation() { StringBuilder result = new StringBuilder("@"); result.append(name); @@ -75,7 +74,7 @@ public String getSyntacticRepresentation() { result.append(OldSerializer.getString(rule.getHead())).append(OldSerializer.RULE_SEPARATOR) .append(OldSerializer.getString(rule.getBody())); } else if (argument.fromPositiveLiteral().isPresent()) { - result.append(argument.fromPositiveLiteral().get().getSyntacticRepresentation()); + result.append(argument.fromPositiveLiteral().get().toString()); } else { throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java index 2b7511580..60b53ea1a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -22,8 +22,6 @@ import 
java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for representing conjunctions of {@link Literal}s, i.e., lists of * (negated or positive) atomic formulas that are connected with logical AND. @@ -41,9 +39,4 @@ public interface Conjunction extends Iterable, SyntaxObjec */ List getLiterals(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java index c609e1a03..14fb54412 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -1,7 +1,5 @@ package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /*- * #%L * Rulewerk Core Components @@ -45,8 +43,4 @@ public interface DataSourceDeclaration extends Statement, Entity { */ DataSource getDataSource(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java index 8f4f7c414..0de18e509 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -20,10 +20,6 @@ * #L% */ -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for datatype constants, i.e. for constants that represent a * specific value of a concrete datatype). Such terms are of type @@ -62,13 +58,4 @@ default TermType getType() { */ String getLexicalValue(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return OldSerializer.getString(this, iriTransformer); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java index 431b90299..c4bfd7a16 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java @@ -27,11 +27,5 @@ * */ public interface Entity { - /** - * Returns the parsable string representation of an Entity. 
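Since these interfaces no longer provide default `getSyntacticRepresentation()` implementations, callers that need a parsable string are expected to go through the `Serializer`, or the `toString()` overrides updated later in this patch. A minimal, hypothetical before/after sketch, using only the `Serializer.getSerialization` helper and `Expressions` factory methods shown earlier in this series; the class name and expected output are assumptions.

```
import org.semanticweb.rulewerk.core.model.api.Fact;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.model.implementation.Serializer;

public class SerializationMigrationSketch {
	public static void main(String[] args) {
		Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1),
				Expressions.makeAbstractConstant("c"));

		// Previously: String s = fact.getSyntacticRepresentation();
		// Now: delegate to the Serializer, either explicitly ...
		String viaSerializer = Serializer.getSerialization(serializer -> serializer.writeFact(fact));
		// ... or implicitly via the updated toString() overrides.
		String viaToString = fact.toString();

		System.out.println(viaSerializer); // expected to match SerializerTest#serializeFact, e.g. "p(c) ."
		System.out.println(viaToString);
	}
}
```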
- * - * @return non-empty String - */ - String getSyntacticRepresentation(); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java index 35f08ab39..1b47a54c7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java @@ -20,8 +20,6 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for existentially quantified variables, i.e., variables that appear * in the scope of an (implicit) existential quantifier in a rule. @@ -35,8 +33,4 @@ default TermType getType() { return TermType.EXISTENTIAL_VARIABLE; } - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java index 5b1fc9df2..d3b938897 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java @@ -1,7 +1,5 @@ package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /*- * #%L * Rulewerk Core Components @@ -31,9 +29,4 @@ */ public interface Fact extends PositiveLiteral, Statement { - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getFactString(this); - } - } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java index 2d7d082d5..e19bf3517 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java @@ -20,10 +20,6 @@ * #L% */ -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for string constants with a language tag, used to represent values * of type http://www.w3.org/1999/02/22-rdf-syntax-ns#langString in RDF, OWL, @@ -65,13 +61,4 @@ default String getDatatype() { */ String getLanguageTag(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getConstantName(this); - } - - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return getSyntacticRepresentation(); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java index d40d3a04a..896dc1f6f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -22,8 +22,6 @@ import java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for literals. 
A positive literal is simply an atomic formula, i.e., * a formula of the form P(t1,...,tn) where P is a {@link Predicate} of arity n @@ -52,9 +50,4 @@ public interface Literal extends SyntaxObject, Entity { */ List getArguments(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java index b78a6be6d..c4f6354cb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java @@ -20,10 +20,6 @@ * #L% */ -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for {@link TermType#NAMED_NULL} terms. A blank is an entity used to * represent anonymous domain elements introduced during the reasoning process @@ -38,13 +34,4 @@ default TermType getType() { return TermType.NAMED_NULL; } - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return getSyntacticRepresentation(); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java index 5ff3a4daa..7ed7d48f8 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java @@ -20,8 +20,6 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * A Predicate represents a relation between terms. Is uniquely identified by * its name and arity. The arity determines the number of terms allowed in the @@ -48,9 +46,4 @@ public interface Predicate extends Entity { * @return the arity of the Predicate. */ int getArity(); - - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java index 6a49dd596..6b4e0ea79 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java @@ -1,7 +1,5 @@ package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /*- * #%L * Rulewerk Core Components @@ -48,9 +46,4 @@ public interface Rule extends SyntaxObject, Statement, Entity { */ Conjunction getBody(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java index c8aabae47..3bbabcfe4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java @@ -20,8 +20,6 @@ * #L% */ -import java.util.function.Function; - /** * Interface for terms. A term is characterized by a string name and a * {@link TermType}. 
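With the `iriTransformer` variant of `getSyntacticRepresentation` removed from `Term`, abbreviation of IRIs is now decided by the `PrefixDeclarationRegistry` handed to the `Serializer`. A short sketch of the difference, assuming only the constructors and the `writeTerm` method exercised in the tests above; the comments on the produced output are inferred from those tests, not asserted by this patch.

```
import java.io.IOException;
import java.io.StringWriter;

import org.semanticweb.rulewerk.core.model.api.Term;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry;
import org.semanticweb.rulewerk.core.model.implementation.Serializer;

public class TermSerializationSketch {
	public static void main(String[] args) throws IOException {
		Term constant = Expressions.makeAbstractConstant("http://example.org/test");

		// Without prefixes, the absolute IRI is written in angle brackets.
		StringWriter plain = new StringWriter();
		new Serializer(plain).writeTerm(constant);

		// With the prefix declared, the same term is abbreviated to eg:test
		// (compare serializeAbstractConstantWithPrefixDeclarations above).
		MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry();
		prefixes.setPrefixIri("eg:", "http://example.org/");
		StringWriter abbreviated = new StringWriter();
		new Serializer(abbreviated, prefixes).writeTerm(constant);

		System.out.println(plain + " vs " + abbreviated);
	}
}
```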
@@ -73,13 +71,4 @@ default boolean isVariable() { */ T accept(TermVisitor termVisitor); - /** - * Return the parseable string representation of this Term, transforming IRIs. - * - * @param iriTransformer a function that is applied to transform any IRIs - * occurring in the output. - * - * @return non-empty String containing the representation. - */ - String getSyntacticRepresentation(Function iriTransformer); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java index df279681e..74e95c6ae 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java @@ -20,8 +20,6 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for universally quantified variables, i.e., variables that appear * in the scope of an (implicit) universal quantifier in a rule. @@ -35,8 +33,4 @@ default TermType getType() { return TermType.UNIVERSAL_VARIABLE; } - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java index ba0785752..c68d289c2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java @@ -20,8 +20,6 @@ * #L% */ -import java.util.function.Function; - /** * Interface for variables, i.e., terms of type * {@link TermType#UNIVERSAL_VARIABLE} and @@ -32,8 +30,4 @@ * @author Markus Krötzsch */ public interface Variable extends Term { - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return getSyntacticRepresentation(); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java index 426c993c6..508360739 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java @@ -47,6 +47,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeAbstractConstant(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java index 4b3669226..4ee1b98e0 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -93,7 +93,7 @@ public boolean equals(final Object obj) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeLiteral(this)); } @Override diff --git 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 31124e5d5..14673acdc 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -37,6 +37,12 @@ * @author Maximilian Marx */ public abstract class AbstractPrefixDeclarationRegistry implements PrefixDeclarationRegistry { + + /** + * Pattern for strings that are permissible as local names in abbreviated forms. + */ + static public final String REGEXP_LOCNAME = "^[a-zA-Z]([/a-zA-Z0-9_-])*$"; + /** * Map associating each prefixName with the full prefixIri. */ @@ -101,7 +107,7 @@ public String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarat public String unresolveAbsoluteIri(String iri, boolean addIriBrackets) { String shortestIri; if (addIriBrackets) { - if (!iri.contains(":")) { + if (!iri.contains(":") && iri.matches(REGEXP_LOCNAME)) { shortestIri = iri; if (!PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri)) { throw new RulewerkRuntimeException("Relative IRIs cannot be serialized when a base is declared."); @@ -120,8 +126,7 @@ public String unresolveAbsoluteIri(String iri, boolean addIriBrackets) { String shorterIri = iri.substring(baseIri.length()); // Only allow very simple names of this form, to avoid confusion, e.g., with // numbers or boolean literals: - if (shorterIri.matches("^[a-zA-Z]([/a-zA-Z0-9_-])*$") && !"true".equals(shorterIri) - || !"false".equals(shorterIri)) { + if (shorterIri.matches(REGEXP_LOCNAME) && !"true".equals(shorterIri) || !"false".equals(shorterIri)) { shortestIri = shorterIri; } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java index 24a750694..6da458d75 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java @@ -87,7 +87,7 @@ public Iterator iterator() { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeLiteralConjunction(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java index db80f1549..c031d5af4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java @@ -85,7 +85,7 @@ public T accept(StatementVisitor statementVisitor) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeDataSourceDeclaration(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java 
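To make the new `REGEXP_LOCNAME` guard and the `addIriBrackets` flag concrete, here is a hedged sketch of `unresolveAbsoluteIri`. The behaviour is inferred from the hunk above and from the tests at the top of this section; the prefix-based shortening is not part of the shown hunk, so the commented return values are assumptions.

```
import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry;

public class UnresolveSketch {
	public static void main(String[] args) {
		MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry();
		prefixes.setPrefixIri("eg:", "http://example.org/");

		// Known prefix: the absolute IRI is abbreviated to a prefixed name,
		// per the SerializerTest expectation "eg:test".
		System.out.println(prefixes.unresolveAbsoluteIri("http://example.org/test", true));

		// No matching prefix: with addIriBrackets=true the IRI stays absolute, wrapped in <...>.
		System.out.println(prefixes.unresolveAbsoluteIri("http://other.org/x", true));

		// A bare local name matching REGEXP_LOCNAME is kept as-is while no @base is declared;
		// once a base IRI is set, the hunk above throws a RulewerkRuntimeException instead.
		System.out.println(prefixes.unresolveAbsoluteIri("c", true));
	}
}
```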
index f58e13002..8696190c7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -60,12 +60,12 @@ public String getLexicalValue() { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstant(this)); } @Override public String getName() { - return OldSerializer.getConstantName(this); + return toString(); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java index 2f7f41e5f..3a130ee21 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java @@ -46,6 +46,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeExistentialVariable(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java index fad53f4bb..aa984d178 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java @@ -51,7 +51,7 @@ public T accept(final StatementVisitor statementVisitor) { @Override public String toString() { - return this.getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeFact(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java index 8a7ae5ca1..e7ab0f8dd 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java @@ -44,7 +44,7 @@ public LanguageStringConstantImpl(String string, String languageTag) { @Override public String getName() { - return OldSerializer.getConstantName(this); + return toString(); } @Override @@ -84,7 +84,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeLanguageStringConstant(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java index 803629460..0dae674db 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java @@ -48,6 +48,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return getSyntacticRepresentation(); + return 
Serializer.getSerialization(serializer -> serializer.writeNamedNull(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java new file mode 100644 index 000000000..44cb41273 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java @@ -0,0 +1,560 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; +import java.util.Map.Entry; +import java.util.function.Function; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +/** + * A utility class with static methods to obtain the correct parsable string + * representation of the different data models. 
+ * + * @author Ali Elhalawati + * + */ +@Deprecated +public final class OldSerializer { + private static final String NEW_LINE = "\n"; + public static final String STATEMENT_SEPARATOR = " ."; + public static final String COMMA = ", "; + public static final String NEGATIVE_IDENTIFIER = "~"; + public static final String EXISTENTIAL_IDENTIFIER = "!"; + public static final String UNIVERSAL_IDENTIFIER = "?"; + public static final String NAMEDNULL_IDENTIFIER = "_:"; + public static final String OPENING_PARENTHESIS = "("; + public static final String CLOSING_PARENTHESIS = ")"; + public static final String OPENING_BRACKET = "["; + public static final String CLOSING_BRACKET = "]"; + public static final String RULE_SEPARATOR = " :- "; + public static final char AT = '@'; + public static final String DATA_SOURCE = "@source "; + public static final String BASE = "@base "; + public static final String PREFIX = "@prefix "; + public static final String CSV_FILE_DATA_SOURCE = "load-csv"; + public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; + public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; + public static final String DATA_SOURCE_SEPARATOR = ": "; + public static final String COLON = ":"; + public static final String DOUBLE_CARET = "^^"; + public static final char LESS_THAN = '<'; + public static final char MORE_THAN = '>'; + public static final char QUOTE = '"'; + + public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; + public static final String REGEX_INTEGER = "^[-+]?\\d+$"; + public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; + public static final String REGEX_TRUE = "true"; + public static final String REGEX_FALSE = "false"; + + /** + * Constructor. + */ + private OldSerializer() { + + } + + /** + * Creates a String representation of a given {@link Rule}. + * + * @see Rule + * syntax + * @param rule a {@link Rule}. + * @return String representation corresponding to a given {@link Rule}. + * + */ + public static String getString(final Rule rule) { + return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link Conjunction}. + * + * @see Rule + * syntax + * @param conjunction a {@link Conjunction} + * @return String representation corresponding to a given {@link Conjunction}. + */ + public static String getString(final Conjunction conjunction) { + final StringBuilder stringBuilder = new StringBuilder(); + boolean first = true; + for (final Literal literal : conjunction.getLiterals()) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + stringBuilder.append(getString(literal)); + } + return stringBuilder.toString(); + } + + /** + * Creates a String representation of a given {@link Literal}. + * + * @see Rule + * syntax + * @param literal a {@link Literal} + * @return String representation corresponding to a given {@link Literal}. + */ + public static String getString(final Literal literal) { + final StringBuilder stringBuilder = new StringBuilder(""); + if (literal.isNegated()) { + stringBuilder.append(NEGATIVE_IDENTIFIER); + } + stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); + return stringBuilder.toString(); + } + + /** + * Creates a String representation of a given {@link Fact}. + * + * @see Rule + * syntax + * @param fact a {@link Fact} + * @return String representation corresponding to a given {@link Fact}. 
+ */ + public static String getFactString(final Fact fact) { + return getString(fact) + STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link AbstractConstant}. + * + * @see Rule + * syntax + * @param constant a {@link AbstractConstant} + * @param iriTransformer a function to transform IRIs with. + * @return String representation corresponding to a given + * {@link AbstractConstant}. + */ + public static String getString(final AbstractConstant constant, final Function iriTransformer) { + return getIRIString(constant.getName(), iriTransformer); + } + + /** + * Creates a String representation of a given {@link AbstractConstant}. + * + * @see Rule + * syntax + * @param constant a {@link AbstractConstant} + * @return String representation corresponding to a given + * {@link AbstractConstant}. + */ + public static String getString(final AbstractConstant constant) { + return getIRIString(constant.getName()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + * + * @see Rule + * syntax + * @param languageStringConstant a {@link LanguageStringConstant} + * @return String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + */ + public static String getConstantName(final LanguageStringConstant languageStringConstant) { + return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); + } + + /** + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. + * + * examples: + *
+ * <ul>
+ * <li>{@code "string"^^xsd:String} results in {@code "string"},</li>
+ * <li>{@code "23.0"^^xsd:Decimal} results in {@code 23.0},</li>
+ * <li>{@code "42"^^xsd:Integer} results in {@code 42},</li>
+ * <li>{@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and</li>
+ * <li>{@code "test"^^} results in {@code "test"^^}, modulo transformation of the datatype IRI.</li>
+ * </ul>
    + * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @param iriTransformer a function to transform IRIs with. + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(final DatatypeConstant datatypeConstant, + final Function iriTransformer) { + if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { + return getString(datatypeConstant.getLexicalValue()); + } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { + return datatypeConstant.getLexicalValue(); + } + + return getConstantName(datatypeConstant, iriTransformer); + } + + /** + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. + * + * examples: + *
+ * <ul>
+ * <li>{@code "string"^^xsd:String} results in {@code "string"},</li>
+ * <li>{@code "23.0"^^xsd:Decimal} results in {@code 23.0},</li>
+ * <li>{@code "42"^^xsd:Integer} results in {@code 42},</li>
+ * <li>{@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and</li>
+ * <li>{@code "test"^^<http://example.org/type>} results in
+ * {@code "test"^^<http://example.org/type>}.</li>
+ * </ul>
    + * + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(final DatatypeConstant datatypeConstant) { + return getString(datatypeConstant, Function.identity()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + private static String getConstantName(final DatatypeConstant datatypeConstant, + final Function iriTransformer) { + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + + getIRIString(datatypeConstant.getDatatype(), iriTransformer); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getConstantName(final DatatypeConstant datatypeConstant) { + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + + addAngleBrackets(datatypeConstant.getDatatype()); + } + + /** + * Creates a String representation of a given {@link ExistentialVariable}. + * + * @see Rule + * syntax + * @param existentialVariable a {@link ExistentialVariable} + * @return String representation corresponding to a given + * {@link ExistentialVariable}. + */ + public static String getString(final ExistentialVariable existentialVariable) { + return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); + } + + /** + * Creates a String representation of a given {@link UniversalVariable}. + * + * @see Rule + * syntax + * @param universalVariable a {@link UniversalVariable} + * @return String representation corresponding to a given + * {@link UniversalVariable}. + */ + public static String getString(final UniversalVariable universalVariable) { + return UNIVERSAL_IDENTIFIER + universalVariable.getName(); + } + + /** + * Creates a String representation of a given {@link NamedNull}. + * + * @see Rule + * syntax + * @param namedNull a {@link NamedNull} + * @return String representation corresponding to a given {@link NamedNull}. + */ + public static String getString(final NamedNull namedNull) { + return NAMEDNULL_IDENTIFIER + namedNull.getName(); + } + + /** + * Creates a String representation of a given {@link Predicate}. + * + * @see Rule + * syntax + * @param predicate a {@link Predicate} + * @return String representation corresponding to a given {@link Predicate}. + */ + public static String getString(final Predicate predicate) { + return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; + } + + /** + * Creates a String representation of a given {@link DataSourceDeclaration}. + * + * @see Rule + * syntax + * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @return String representation corresponding to a given + * {@link DataSourceDeclaration}. 
+ */ + public static String getString(final DataSourceDeclaration dataSourceDeclaration) { + return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR; + // + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + + // STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link CsvFileDataSource}. + * + * @see Rule + * syntax + * + * @param csvFileDataSource + * @return String representation corresponding to a given + * {@link CsvFileDataSource}. + */ + public static String getString(final CsvFileDataSource csvFileDataSource) { + return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; + } + + /** + * Creates a String representation of a given {@link RdfFileDataSource}. + * + * @see Rule + * syntax + * + * + * @param rdfFileDataSource + * @return String representation corresponding to a given + * {@link RdfFileDataSource}. + */ + public static String getString(final RdfFileDataSource rdfFileDataSource) { + return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; + } + + /** + * Creates a String representation of a given + * {@link SparqlQueryResultDataSource}. + * + * @see Rule + * syntax + * + * + * @param dataSource + * @return String representation corresponding to a given + * {@link SparqlQueryResultDataSource}. + */ + public static String getString(final SparqlQueryResultDataSource dataSource) { + return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS + + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA + + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) + + CLOSING_PARENTHESIS; + } + + private static String getFileString(final FileDataSource fileDataSource) { + return getString(fileDataSource.getPath()); + } + + private static String getIRIString(final String string) { + return getIRIString(string, Function.identity()); + } + + private static String getIRIString(final String string, final Function iriTransformer) { + final String transformed = iriTransformer.apply(string); + + if (!transformed.equals(string)) { + return transformed; + } + + if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) + || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { + return addAngleBrackets(string); + } + + return string; + } + + /** + * Constructs the parseable, serialized representation of given {@code string}. + * Escapes (with {@code \}) special character occurrences in given + * {@code string}, and surrounds the result with double quotation marks + * ({@code "}). The special characters are: + *
+ * <ul>
+ * <li>{@code \}</li>
+ * <li>{@code "}</li>
+ * <li>{@code \t}</li>
+ * <li>{@code \b}</li>
+ * <li>{@code \n}</li>
+ * <li>{@code \r}</li>
+ * <li>{@code \f}</li>
+ * </ul>
    + * Example for {@code string = "\\a"}, the returned value is + * {@code string = "\"\\\\a\""} + * + * @param string + * @return an escaped string surrounded by {@code "}. + */ + public static String getString(final String string) { + return addQuotes(escape(string)); + } + + /** + * Escapes (with {@code \}) special character occurrences in given + * {@code string}. The special characters are: + *
+ * <ul>
+ * <li>{@code \}</li>
+ * <li>{@code "}</li>
+ * <li>{@code \t}</li>
+ * <li>{@code \b}</li>
+ * <li>{@code \n}</li>
+ * <li>{@code \r}</li>
+ * <li>{@code \f}</li>
+ * </ul>
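A small sketch of the escaping described above, through the public getString(String) wrapper that escapes and then quotes its argument; the comments show the expected console output:

```
import org.semanticweb.rulewerk.core.model.implementation.OldSerializer;

public class EscapingSketch {
	public static void main(String[] args) {
		// backslash, quote and control characters are escaped before the string is quoted
		System.out.println(OldSerializer.getString("\\a"));            // "\\a"
		System.out.println(OldSerializer.getString("line 1\nline 2")); // "line 1\nline 2"
	}
}
```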
    + * + * @param string + * @return an escaped string + */ + private static String escape(final String string) { + return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") + .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); + // don't touch single quotes here since we only construct double-quoted strings + } + + private static String addQuotes(final String string) { + return QUOTE + string + QUOTE; + } + + private static String addAngleBrackets(final String string) { + return LESS_THAN + string + MORE_THAN; + } + + public static String getFactString(final Predicate predicate, final List terms) { + return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getFactString(final Predicate predicate, final List terms, + final Function iriTransformer) { + return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getString(final Predicate predicate, final List terms) { + return getString(predicate, terms, Function.identity()); + } + + public static String getString(final Predicate predicate, final List terms, + final Function iriTransformer) { + final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); + stringBuilder.append(OPENING_PARENTHESIS); + + boolean first = true; + for (final Term term : terms) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + final String string = term.getName();// term.getSyntacticRepresentation(iriTransformer); + stringBuilder.append(string); + } + stringBuilder.append(CLOSING_PARENTHESIS); + return stringBuilder.toString(); + } + + public static String getBaseString(final KnowledgeBase knowledgeBase) { + final String baseIri = knowledgeBase.getBaseIri(); + + return baseIri.equals(PrefixDeclarationRegistry.EMPTY_BASE) ? 
baseIri : getBaseDeclarationString(baseIri); + } + + private static String getBaseDeclarationString(final String baseIri) { + return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getPrefixString(final Entry prefix) { + return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getBaseAndPrefixDeclarations(final KnowledgeBase knowledgeBase) { + final StringBuilder sb = new StringBuilder(); + + sb.append(getBaseString(knowledgeBase)); + knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); + + return sb.toString(); + } + + public static String getCommandName(final String commandName) { + return AT + commandName; + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index da4bff697..accb87bc4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -86,7 +86,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writePredicate(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java index aae5c7233..15f35d27e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java @@ -105,7 +105,7 @@ public boolean equals(final Object obj) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeRule(this)); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index a624590be..fe851f090 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -25,6 +25,7 @@ import java.io.Writer; import java.util.Iterator; +import java.util.List; import java.util.Map.Entry; import java.util.function.Function; @@ -68,7 +69,11 @@ public class Serializer { public static final Function identityIriSerializer = new Function() { @Override public String apply(String iri) { - return iri.contains(":") ? 
"<" + iri + ">" : iri; + if (iri.contains(":") || !iri.matches(AbstractPrefixDeclarationRegistry.REGEXP_LOCNAME)) { + return "<" + iri + ">"; + } else { + return iri; + } } }; @@ -310,11 +315,26 @@ public void writeDataSourceDeclaration(DataSourceDeclaration dataSourceDeclarati * @throws IOException */ public void writeLiteral(Literal literal) throws IOException { - writer.write(getIri(literal.getPredicate().getName())); + if (literal.isNegated()) { + writer.write("~"); + } + writePositiveLiteral(literal.getPredicate(), literal.getArguments()); + } + + /** + * Serialize the given predicate and list of terms like a + * {@link PositiveLiteral}. + * + * @param predicate a {@link Predicate} + * @param arguments a list of {@link Term} arguments + * @throws IOException + */ + public void writePositiveLiteral(Predicate predicate, List arguments) throws IOException { + writer.write(getIri(predicate.getName())); writer.write("("); boolean first = true; - for (final Term term : literal.getArguments()) { + for (final Term term : arguments) { if (first) { first = false; } else { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java index ce6b40aa6..4dde3f11d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java @@ -46,6 +46,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeUniversalVariable(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index ff9a2d8a0..3f759f573 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -23,7 +23,9 @@ import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; -import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.util.List; import java.util.function.BiConsumer; import java.util.stream.Stream; @@ -36,13 +38,14 @@ import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface that exposes the (existential) rule reasoning capabilities of a @@ -131,11 +134,16 @@ default Correctness unsafeForEachInference(BiConsumer> act * reasoning (materialisation) and its {@link KnowledgeBase}. 
* @throws IOException */ - default Correctness writeInferences(OutputStream stream) throws IOException { - final KnowledgeBase knowledgeBase = getKnowledgeBase(); - stream.write(OldSerializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); - return forEachInference((predicate, termList) -> stream - .write(OldSerializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); + default Correctness writeInferences(Writer writer) throws IOException { + final PrefixDeclarationRegistry prefixDeclarationRegistry = getKnowledgeBase().getPrefixDeclarationRegistry(); + final Serializer serializer = new Serializer(writer, prefixDeclarationRegistry); + + serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); + + return forEachInference((predicate, termList) -> { + serializer.writePositiveLiteral(predicate, termList); + writer.write(" .\n"); + }); } /** @@ -160,18 +168,21 @@ default Stream getInferences() { Correctness getCorrectness(); /** - * Exports all the (explicit and implicit) facts inferred during - * reasoning of the knowledge base to a desired file. + * Exports all the (explicit and implicit) facts inferred during reasoning of + * the knowledge base to a desired file. * * @param filePath a String of the file path for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException * @throws FileNotFoundException + * @deprecated Use {@link KnowledgeBase#writeInferences(Writer)} instead. The + * method will disappear. */ + @Deprecated default Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { - try (OutputStream stream = new FileOutputStream(filePath)) { - return writeInferences(stream); + try (Writer writer = new OutputStreamWriter(new FileOutputStream(filePath), StandardCharsets.UTF_8)) { + return this.writeInferences(writer); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index c3347b47a..66fcf39c9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -23,9 +23,6 @@ import java.io.IOException; import java.util.Arrays; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * An {@code CsvFileDataSource} stores facts in the CSV format inside a file of * the extension {@code .csv}. These fact tuples can be associated with a single @@ -51,6 +48,11 @@ */ public class CsvFileDataSource extends FileDataSource { + /** + * The name of the predicate used for declarations of data sources of this type. 
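A sketch of the new Writer-based inference export introduced in this patch, assuming the usual KnowledgeBase/VLogReasoner setup used elsewhere in these changes; the output comment is an expectation, not verified output:

```
import java.io.StringWriter;
import java.util.Arrays;

import org.semanticweb.rulewerk.core.model.api.Fact;
import org.semanticweb.rulewerk.core.model.api.Predicate;
import org.semanticweb.rulewerk.core.model.api.Term;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;

public class WriteInferencesSketch {
	public static void main(String[] args) throws Exception {
		final KnowledgeBase kb = new KnowledgeBase();
		final Predicate p = Expressions.makePredicate("p", 1);
		final Term c = Expressions.makeAbstractConstant("c");
		final Fact fact = Expressions.makeFact(p, Arrays.asList(c));
		kb.addStatement(fact);

		try (Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.reason();
			final StringWriter writer = new StringWriter();
			reasoner.writeInferences(writer);
			System.out.println(writer.toString()); // p(c) .
		}
	}
}
```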
+ */ + public static final String declarationPredicateName = "load-csv"; + private static final Iterable possibleExtensions = Arrays.asList(".csv", ".csv.gz"); /** @@ -73,11 +75,6 @@ public String toString() { return "CsvFileDataSource [csvFile=" + getFile() + "]"; } - @Override - public String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - @Override public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); @@ -85,6 +82,6 @@ public void accept(DataSourceConfigurationVisitor visitor) throws IOException { @Override String getDeclarationPredicateName() { - return "load-csv"; + return declarationPredicateName; } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index 3dc4c9d2b..18fe4b181 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -24,8 +24,6 @@ import java.util.Arrays; import java.util.Optional; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * An {@code RdfFileDataSource} stores facts in the RDF N-Triples format inside * a file of the extension {@code .nt}. These fact triples can be associated @@ -49,6 +47,11 @@ */ public class RdfFileDataSource extends FileDataSource { + /** + * The name of the predicate used for declarations of data sources of this type. + */ + public static final String declarationPredicateName = "load-rdf"; + private final static Iterable possibleExtensions = Arrays.asList(".nt", ".nt.gz"); /** @@ -71,11 +74,6 @@ public String toString() { return "RdfFileDataSource [rdfFile=" + this.getFile() + "]"; } - @Override - public String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - @Override public Optional getRequiredArity() { return Optional.of(3); @@ -88,6 +86,6 @@ public void accept(DataSourceConfigurationVisitor visitor) throws IOException { @Override String getDeclarationPredicateName() { - return "load-rdf"; + return declarationPredicateName; } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index b89e22068..da80ea3b4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -32,7 +32,6 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a @@ -43,6 +42,11 @@ */ public class SparqlQueryResultDataSource implements ReasonerDataSource { + /** + * The name of the predicate used for declarations of data sources of this type. 
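For context, a sketch of how a data source declaration using these predicate names is built and serialized, assuming the three-argument SparqlQueryResultDataSource constructor suggested by its fields; endpoint and query are just examples, and the serialized form in the comment is an expectation based on the tests in this patch:

```
import java.net.URL;

import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
import org.semanticweb.rulewerk.core.model.api.Predicate;
import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource;

public class DataSourceDeclarationSketch {
	public static void main(String[] args) throws Exception {
		final Predicate population = Expressions.makePredicate("population", 2);
		final SparqlQueryResultDataSource source = new SparqlQueryResultDataSource(
				new URL("https://query.wikidata.org/sparql"), "city,pop", "?city wdt:P1082 ?pop .");
		final DataSourceDeclaration declaration = new DataSourceDeclarationImpl(population, source);
		// serialized via toString(), roughly:
		// @source population[2]: sparql(<https://query.wikidata.org/sparql>, "city,pop", "?city wdt:P1082 ?pop .") .
		System.out.println(declaration);
	}
}
```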
+ */ + public static final String declarationPredicateName = "sparql"; + private final URL endpoint; private final String queryVariables; private final String queryBody; @@ -160,11 +164,6 @@ public String toString() { + ", queryBody=" + this.queryBody + "]"; } - @Override - public String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - @Override public void accept(DataSourceConfigurationVisitor visitor) { visitor.visit(this); @@ -172,7 +171,7 @@ public void accept(DataSourceConfigurationVisitor visitor) { @Override public Fact getDeclarationFact() { - Predicate predicate = Expressions.makePredicate("sparql", 3); + Predicate predicate = Expressions.makePredicate(declarationPredicateName, 3); Term endpointTerm = Expressions.makeAbstractConstant(getEndpoint().toString()); Term variablesTerm = Expressions.makeDatatypeConstant(getQueryVariables(), PrefixDeclarationRegistry.XSD_STRING); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 3c415a190..ef5c89e30 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -33,7 +33,6 @@ import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -89,7 +88,7 @@ public void toString_CsvFileDataSource() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); - final String expectedFilePath = OldSerializer.getString(relativeDirName + fileName); + final String expectedFilePath = "\"" + relativeDirName + fileName + "\""; assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } @@ -114,7 +113,7 @@ public void toString_RdfFileDataSource_relativePath() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); - final String expectedFilePath = OldSerializer.getString(relativeDirName + fileName); + final String expectedFilePath = "\"" + relativeDirName + fileName + "\""; assertEquals("@source q[1]: load-rdf(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index 871fec96f..9ecff982a 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -68,6 +68,7 @@ public class SerializerTest { static PositiveLiteral l1 = Expressions.makePositiveLiteral(p1, universalVariable); static Literal l2 = Expressions.makePositiveLiteral(p2, universalVariable, abstractConstantShort); static Rule rule = Expressions.makeRule(l1, l2, 
fact); + static Literal ln1 = Expressions.makeNegativeLiteral(p1, existentialVariable); StringWriter writer; Serializer serializer; @@ -161,6 +162,24 @@ public void serializeSparqlDataSourceDeclaration() throws IOException { assertEquals("@source p1[1]: sparql(, \"var\", \"?var \") .", writer.toString()); } + @Test + public void serializePositiveLiteral() throws IOException { + serializer.writeLiteral(l1); + assertEquals("p1(?X)", writer.toString()); + } + + @Test + public void serializePositiveLiteralFromTerms() throws IOException { + serializer.writePositiveLiteral(l1.getPredicate(),l1.getArguments()); + assertEquals("p1(?X)", writer.toString()); + } + + @Test + public void serializeNegativeLiteral() throws IOException { + serializer.writeLiteral(ln1); + assertEquals("~p1(!X)", writer.toString()); + } + @Test public void serializeAbstractConstantWithPrefixDeclarations() throws IOException { final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java index 9aa9f7129..2420dc479 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java @@ -97,10 +97,10 @@ public void abstractConstantGetterTest() { @Test public void datatypeConstantGetterTest() { - DatatypeConstant c = new DatatypeConstantImpl("c", PrefixDeclarationRegistry.XSD_STRING); + DatatypeConstant c = new DatatypeConstantImpl("c", "http://example.org/type"); assertEquals("c", c.getLexicalValue()); - assertEquals("http://www.w3.org/2001/XMLSchema#string", c.getDatatype()); - assertEquals("\"c\"^^", c.getName()); + assertEquals("http://example.org/type", c.getDatatype()); + assertEquals("\"c\"^^", c.getName()); assertEquals(TermType.DATATYPE_CONSTANT, c.getType()); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java index ca4561dac..d8ce99ddb 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java @@ -1,5 +1,9 @@ package org.semanticweb.rulewerk.parser; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + /*- * #%L * Rulewerk Parser @@ -20,7 +24,6 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.parser.datasources.CsvFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; @@ -43,9 +46,9 @@ public DefaultParserConfiguration() { * Register built-in data sources (currently CSV, RDF, SPARQL). 
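Since the default configuration registers these handlers, parsing works out of the box; a minimal sketch (the fact text is arbitrary, and the printed form is an expectation based on the serializer changes in this patch):

```
import org.semanticweb.rulewerk.core.model.api.Fact;
import org.semanticweb.rulewerk.parser.ParsingException;
import org.semanticweb.rulewerk.parser.RuleParser;

public class ParseFactSketch {
	public static void main(String[] args) throws ParsingException {
		final Fact fact = RuleParser.parseFact("p(<http://example.org/c>) .");
		// toString() now goes through the new Serializer, e.g. p(<http://example.org/c>) .
		System.out.println(fact);
	}
}
```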
*/ private void registerDefaultDataSources() { - registerDataSource(OldSerializer.CSV_FILE_DATA_SOURCE, new CsvFileDataSourceDeclarationHandler()); - registerDataSource(OldSerializer.RDF_FILE_DATA_SOURCE, new RdfFileDataSourceDeclarationHandler()); - registerDataSource(OldSerializer.SPARQL_QUERY_RESULT_DATA_SOURCE, + registerDataSource(CsvFileDataSource.declarationPredicateName, new CsvFileDataSourceDeclarationHandler()); + registerDataSource(RdfFileDataSource.declarationPredicateName, new RdfFileDataSourceDeclarationHandler()); + registerDataSource(SparqlQueryResultDataSource.declarationPredicateName, new SparqlQueryResultDataSourceDeclarationHandler()); } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java index 910e9375b..442e56de9 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java @@ -185,12 +185,10 @@ public void literalToStringRoundTripTest() throws ParsingException { @Test public void datatypeDoubleConstantToStringRoundTripTest() throws ParsingException { - String shortDoubleConstant = "12.345E67"; - assertEquals(shortDoubleConstant, - RuleParser.parseFact("p(\"" + shortDoubleConstant + "\"^^).") - .getArguments().get(0).toString()); - assertEquals(shortDoubleConstant, - RuleParser.parseFact("p(" + shortDoubleConstant + ").").getArguments().get(0).toString()); + String doubleConstant = "\"12.345E67\"^^"; + assertEquals(doubleConstant, + RuleParser.parseFact("p(" + doubleConstant + ").").getArguments().get(0).toString()); + assertEquals(doubleConstant, RuleParser.parseFact("p(12.345E67).").getArguments().get(0).toString()); } @Test @@ -221,11 +219,9 @@ public void datatypeIntegerConstantToStringRoundTripTest() throws ParsingExcepti @Test public void datatypeDecimalToStringRoundTripTest() throws ParsingException { - String shortDecimalConstant = "0.23"; - assertEquals(shortDecimalConstant, - RuleParser.parseFact("p(\"" + shortDecimalConstant + "\"^^).") - .getArguments().get(0).toString()); - assertEquals(shortDecimalConstant, - RuleParser.parseFact("p(" + shortDecimalConstant + ").").getArguments().get(0).toString()); + String decimalConstant = "\"0.23\"^^"; + assertEquals(decimalConstant, + RuleParser.parseFact("p(" + decimalConstant + ").").getArguments().get(0).toString()); + assertEquals(decimalConstant, RuleParser.parseFact("p(0.23).").getArguments().get(0).toString()); } } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index 271a49598..42041cc6d 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -249,7 +249,7 @@ public void parseLiteral_escapeSequences_succeeds() throws ParsingException { public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); - assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @Test @@ -265,7 +265,7 @@ public void parseLiteral_allEscapeSequences_succeeds() throws ParsingException { public void 
parseLiteral_allEscapeSequences_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); - assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @Test(expected = ParsingException.class) @@ -292,7 +292,7 @@ public void parseLiteral_multiLineLiteral_succeeds() throws ParsingException { public void parseLiteral_multiLineLiteral_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING)); - assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @Test(expected = ParsingException.class) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java index 4952cfb84..e406487f9 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java @@ -73,7 +73,7 @@ public String[][] getData() { } @Override - public String getSyntacticRepresentation() { + public String toString() { final StringBuilder sb = new StringBuilder( "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); for (int i = 0; i < getData().length; i++) { diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index 3b7876dda..b237dc947 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -30,7 +30,6 @@ import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; /** @@ -83,15 +82,15 @@ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { static Term toTerm(karmaresearch.vlog.Term vLogTerm) { final String name = vLogTerm.getName(); switch (vLogTerm.getTermType()) { - case CONSTANT: - return toConstant(name); - case BLANK: - return new NamedNullImpl(name); - case VARIABLE: - throw new IllegalArgumentException( - "VLog variables cannot be converted without knowing if they are universally or existentially quantified."); - default: - throw new IllegalArgumentException("Unexpected VLog term type: " + vLogTerm.getTermType()); + case CONSTANT: + return toConstant(name); + case BLANK: + return new NamedNullImpl(name); + case VARIABLE: + throw new IllegalArgumentException( + "VLog variables cannot be converted without knowing if they are universally or existentially quantified."); + default: + throw new IllegalArgumentException("Unexpected VLog term type: " + vLogTerm.getTermType()); } } @@ -103,19 +102,17 @@ static Term 
toTerm(karmaresearch.vlog.Term vLogTerm) { */ static Constant toConstant(String vLogConstantName) { final Constant constant; - if (vLogConstantName.charAt(0) == OldSerializer.LESS_THAN - && vLogConstantName.charAt(vLogConstantName.length() - 1) == OldSerializer.MORE_THAN) { + if (vLogConstantName.charAt(0) == '<' && vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { // strip <> off of IRIs constant = new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1)); - } else if (vLogConstantName.charAt(0) == OldSerializer.QUOTE) { - if (vLogConstantName.charAt(vLogConstantName.length() - 1) == OldSerializer.MORE_THAN) { - final int startTypeIdx = vLogConstantName.lastIndexOf(OldSerializer.LESS_THAN, - vLogConstantName.length() - 2); + } else if (vLogConstantName.charAt(0) == '"') { + if (vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { + final int startTypeIdx = vLogConstantName.lastIndexOf('<', vLogConstantName.length() - 2); final String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1); final String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3); constant = new DatatypeConstantImpl(lexicalValue, datatype); } else { - final int startTypeIdx = vLogConstantName.lastIndexOf(OldSerializer.AT, vLogConstantName.length() - 2); + final int startTypeIdx = vLogConstantName.lastIndexOf('@', vLogConstantName.length() - 2); if (startTypeIdx > -1) { final String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); final String string = vLogConstantName.substring(1, startTypeIdx - 1); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java index 19365056e..c5ff2617d 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -23,8 +23,8 @@ import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -118,15 +118,15 @@ public void writeInferences_withPrefixDeclarations_abbreviatesIris() assertTrue("the abbreviated fact is present", getInferences().contains("eg:s(eg:c) .")); } - @Test + @Test(expected = RulewerkRuntimeException.class) public void writeInferences_withBase_writesBase() throws IOException, PrefixDeclarationException { PrefixDeclarationRegistry prefixDeclarations = mock(PrefixDeclarationRegistry.class); when(prefixDeclarations.getBaseIri()).thenReturn("http://example.org/"); when(prefixDeclarations.iterator()).thenReturn(new HashMap().entrySet().iterator()); kb.mergePrefixDeclarations(prefixDeclarations); - - assertEquals(11, getInferences().size()); - assertTrue("the base declaration is present", getInferences().contains("@base .")); + getInferences(); + //assertEquals(11, getInferences().size()); + //assertTrue("the base declaration is present", getInferences().contains("@base .")); } @Test @@ -134,8 +134,7 @@ public void getInferences_example_succeeds() throws IOException { final List inferences = getInferences(); try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final List fromStream = 
reasoner.getInferences().map(Fact::getSyntacticRepresentation) - .collect(Collectors.toList()); + final List fromStream = reasoner.getInferences().map(Fact::toString).collect(Collectors.toList()); assertEquals(inferences, fromStream); } } @@ -148,7 +147,7 @@ public void unsafeForEachInference_example_succeeds() throws IOException { final List fromUnsafe = new ArrayList<>(); reasoner.unsafeForEachInference((Predicate, terms) -> { - fromUnsafe.add(Expressions.makeFact(Predicate, terms).getSyntacticRepresentation()); + fromUnsafe.add(Expressions.makeFact(Predicate, terms).toString()); }); assertEquals(inferences, fromUnsafe); @@ -183,11 +182,10 @@ public void unsafeForEachInference_throwingAction_throws() throws IOException { private List getInferences() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - ByteArrayOutputStream stream = new ByteArrayOutputStream(); - reasoner.writeInferences(stream); - stream.flush(); + StringWriter writer = new StringWriter(); + reasoner.writeInferences(writer); - Stream inferences = Arrays.stream(stream.toString().split("(?<=[>)]\\s?)\\.\\s*")); + Stream inferences = Arrays.stream(writer.toString().split("(?<=[>)]\\s?)\\.\\s*")); return inferences.map((String inference) -> inference + ".").collect(Collectors.toList()); } From c50a5faaccf49e0fbe51334ddfc82dc72aa02440 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 01:46:18 +0200 Subject: [PATCH 0702/1003] Support serialization of Commands --- .../rulewerk/core/model/api/Command.java | 22 +++------------- .../core/model/implementation/Serializer.java | 25 +++++++++++++++++++ 2 files changed, 29 insertions(+), 18 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java index 1a5dc295b..5240358ad 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Class for representing a generic command that can be executed. 
@@ -64,23 +64,9 @@ public List getArguments() { return arguments; } - public String getSyntacticRepresentation() { - StringBuilder result = new StringBuilder("@"); - result.append(name); - for (Argument argument : arguments) { - result.append(" "); - if (argument.fromRule().isPresent()) { - Rule rule = argument.fromRule().get(); - result.append(OldSerializer.getString(rule.getHead())).append(OldSerializer.RULE_SEPARATOR) - .append(OldSerializer.getString(rule.getBody())); - } else if (argument.fromPositiveLiteral().isPresent()) { - result.append(argument.fromPositiveLiteral().get().toString()); - } else { - throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); - } - } - result.append(OldSerializer.STATEMENT_SEPARATOR); - return result.toString(); + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeCommand(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index fe851f090..0c1e41d58 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -30,6 +30,8 @@ import java.util.function.Function; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.Conjunction; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; @@ -495,6 +497,29 @@ public void writeLanguageStringConstant(LanguageStringConstant languageStringCon writer.write(languageStringConstant.getLanguageTag()); } + /** + * Writes a serialization of the given {@link Command}. + * + * @param command a {@link Command} + * @throws IOException + */ + public void writeCommand(Command command) throws IOException { + writer.write("@"); + writer.write(command.getName()); + + for (Argument argument : command.getArguments()) { + writer.write(" "); + if (argument.fromRule().isPresent()) { + writeRule(argument.fromRule().get()); + } else if (argument.fromPositiveLiteral().isPresent()) { + writeLiteral(argument.fromPositiveLiteral().get()); + } else { + throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); + } + } + writer.write(STATEMENT_END); + } + /** * Convenience method for obtaining serializations as Java strings. 
* From 423d0b8699aba532c976014df4b948719122eb29 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 01:46:33 +0200 Subject: [PATCH 0703/1003] Avoid deprecated OldSerializer --- .../rulewerk/client/shell/DefaultConfiguration.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 927c61701..73477a47b 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -34,7 +34,6 @@ import org.jline.utils.AttributedString; import org.jline.utils.AttributedStyle; import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; public final class DefaultConfiguration { @@ -63,7 +62,7 @@ public static LineReader buildLineReader(final Terminal terminal, final Interpre private static Completer buildCompleter(final Interpreter interpreter) { final Set registeredCommandNames = interpreter.getRegisteredCommands(); final List serializedCommandNames = registeredCommandNames.stream() - .map(commandName -> OldSerializer.getCommandName(commandName)) + .map(commandName -> "@" + commandName) .collect(Collectors.toList()); return new StringsCompleter(serializedCommandNames); } From 23fe99b61723dbd6d0f7dacfc7ba6bf228164af0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 01:57:51 +0200 Subject: [PATCH 0704/1003] Remove old serializer code --- .../rulewerk/core/model/api/Conjunction.java | 2 +- .../core/model/api/DataSourceDeclaration.java | 2 +- .../rulewerk/core/model/api/Entity.java | 4 +- .../rulewerk/core/model/api/Literal.java | 2 +- .../rulewerk/core/model/api/Rule.java | 2 +- .../rulewerk/core/model/api/Statement.java | 2 +- .../rulewerk/core/model/api/SyntaxObject.java | 2 +- .../model/implementation/OldSerializer.java | 560 ------------------ 8 files changed, 7 insertions(+), 569 deletions(-) delete mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java index 60b53ea1a..f7b8b760f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -30,7 +30,7 @@ * @author Markus Krötzsch * */ -public interface Conjunction extends Iterable, SyntaxObject, Entity { +public interface Conjunction extends Iterable, SyntaxObject { /** * Returns the list of literals that are part of this conjunction. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java index 14fb54412..22efc0aae 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -27,7 +27,7 @@ * @author Markus Kroetzsch * */ -public interface DataSourceDeclaration extends Statement, Entity { +public interface DataSourceDeclaration extends Statement { /** * Returns the {@link Predicate} that this source applies to. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java index c4bfd7a16..75c40aa3d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java @@ -21,9 +21,7 @@ */ /** - * Interface for every parsable data model that has a string representation - * - * @author Ali Elhalawati + * Most general type of syntactic entity in Rulewerk. * */ public interface Entity { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java index 896dc1f6f..b345b070d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -31,7 +31,7 @@ * @author david.carral@tu-dresden.de * @author Irina Dragoste */ -public interface Literal extends SyntaxObject, Entity { +public interface Literal extends SyntaxObject { boolean isNegated(); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java index 6b4e0ea79..9187282eb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java @@ -30,7 +30,7 @@ * @author Markus Krötzsch * */ -public interface Rule extends SyntaxObject, Statement, Entity { +public interface Rule extends SyntaxObject, Statement { /** * Returns the conjunction of head literals (the consequence of the rule). diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java index fc2b4b009..f43c03248 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java @@ -27,7 +27,7 @@ * @author Markus Kroetzsch * */ -public interface Statement { +public interface Statement extends Entity { /** * Accept a {@link StatementVisitor} and return its output. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java index 31de18cf6..7dcf50142 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java @@ -30,7 +30,7 @@ * @author Markus Kroetzsch * */ -public interface SyntaxObject { +public interface SyntaxObject extends Entity { /** * Returns the stream of distinct terms that occur in this object. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java deleted file mode 100644 index 44cb41273..000000000 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java +++ /dev/null @@ -1,560 +0,0 @@ -package org.semanticweb.rulewerk.core.model.implementation; - -/*- - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.List; -import java.util.Map.Entry; -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; -import org.semanticweb.rulewerk.core.model.api.Conjunction; -import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; -import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; -import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; -import org.semanticweb.rulewerk.core.model.api.Literal; -import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.UniversalVariable; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; - -/** - * A utility class with static methods to obtain the correct parsable string - * representation of the different data models. 
- * - * @author Ali Elhalawati - * - */ -@Deprecated -public final class OldSerializer { - private static final String NEW_LINE = "\n"; - public static final String STATEMENT_SEPARATOR = " ."; - public static final String COMMA = ", "; - public static final String NEGATIVE_IDENTIFIER = "~"; - public static final String EXISTENTIAL_IDENTIFIER = "!"; - public static final String UNIVERSAL_IDENTIFIER = "?"; - public static final String NAMEDNULL_IDENTIFIER = "_:"; - public static final String OPENING_PARENTHESIS = "("; - public static final String CLOSING_PARENTHESIS = ")"; - public static final String OPENING_BRACKET = "["; - public static final String CLOSING_BRACKET = "]"; - public static final String RULE_SEPARATOR = " :- "; - public static final char AT = '@'; - public static final String DATA_SOURCE = "@source "; - public static final String BASE = "@base "; - public static final String PREFIX = "@prefix "; - public static final String CSV_FILE_DATA_SOURCE = "load-csv"; - public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; - public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; - public static final String DATA_SOURCE_SEPARATOR = ": "; - public static final String COLON = ":"; - public static final String DOUBLE_CARET = "^^"; - public static final char LESS_THAN = '<'; - public static final char MORE_THAN = '>'; - public static final char QUOTE = '"'; - - public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; - public static final String REGEX_INTEGER = "^[-+]?\\d+$"; - public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; - public static final String REGEX_TRUE = "true"; - public static final String REGEX_FALSE = "false"; - - /** - * Constructor. - */ - private OldSerializer() { - - } - - /** - * Creates a String representation of a given {@link Rule}. - * - * @see Rule - * syntax - * @param rule a {@link Rule}. - * @return String representation corresponding to a given {@link Rule}. - * - */ - public static String getString(final Rule rule) { - return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link Conjunction}. - * - * @see Rule - * syntax - * @param conjunction a {@link Conjunction} - * @return String representation corresponding to a given {@link Conjunction}. - */ - public static String getString(final Conjunction conjunction) { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final Literal literal : conjunction.getLiterals()) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - stringBuilder.append(getString(literal)); - } - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Literal}. - * - * @see Rule - * syntax - * @param literal a {@link Literal} - * @return String representation corresponding to a given {@link Literal}. - */ - public static String getString(final Literal literal) { - final StringBuilder stringBuilder = new StringBuilder(""); - if (literal.isNegated()) { - stringBuilder.append(NEGATIVE_IDENTIFIER); - } - stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Fact}. - * - * @see Rule - * syntax - * @param fact a {@link Fact} - * @return String representation corresponding to a given {@link Fact}. 
- */ - public static String getFactString(final Fact fact) { - return getString(fact) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule - * syntax - * @param constant a {@link AbstractConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link AbstractConstant}. - */ - public static String getString(final AbstractConstant constant, final Function iriTransformer) { - return getIRIString(constant.getName(), iriTransformer); - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule - * syntax - * @param constant a {@link AbstractConstant} - * @return String representation corresponding to a given - * {@link AbstractConstant}. - */ - public static String getString(final AbstractConstant constant) { - return getIRIString(constant.getName()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - * - * @see Rule - * syntax - * @param languageStringConstant a {@link LanguageStringConstant} - * @return String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - */ - public static String getConstantName(final LanguageStringConstant languageStringConstant) { - return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
      - *
    • {@code "string"^^xsd:String} results in {@code "string"},
    • - *
    • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
    • - *
    • {@code "42"^^xsd:Integer} results in {@code 42},
    • - *
    • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
    • - *
    • {@code "test"^^} results in - * {@code "test"^^}, modulo transformation of the datatype - * IRI.
    • - *
    - * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant, - final Function iriTransformer) { - if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { - return getString(datatypeConstant.getLexicalValue()); - } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { - return datatypeConstant.getLexicalValue(); - } - - return getConstantName(datatypeConstant, iriTransformer); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
      - *
    • {@code "string"^^xsd:String} results in {@code "string"},
    • - *
    • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
    • - *
    • {@code "42"^^xsd:Integer} results in {@code 42},
    • - *
    • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
    • - *
    • {@code "test"^^} results in - * {@code "test"^^}.
    • - *
    - * - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant, Function.identity()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - private static String getConstantName(final DatatypeConstant datatypeConstant, - final Function iriTransformer) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + getIRIString(datatypeConstant.getDatatype(), iriTransformer); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getConstantName(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + addAngleBrackets(datatypeConstant.getDatatype()); - } - - /** - * Creates a String representation of a given {@link ExistentialVariable}. - * - * @see Rule - * syntax - * @param existentialVariable a {@link ExistentialVariable} - * @return String representation corresponding to a given - * {@link ExistentialVariable}. - */ - public static String getString(final ExistentialVariable existentialVariable) { - return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); - } - - /** - * Creates a String representation of a given {@link UniversalVariable}. - * - * @see Rule - * syntax - * @param universalVariable a {@link UniversalVariable} - * @return String representation corresponding to a given - * {@link UniversalVariable}. - */ - public static String getString(final UniversalVariable universalVariable) { - return UNIVERSAL_IDENTIFIER + universalVariable.getName(); - } - - /** - * Creates a String representation of a given {@link NamedNull}. - * - * @see Rule - * syntax - * @param namedNull a {@link NamedNull} - * @return String representation corresponding to a given {@link NamedNull}. - */ - public static String getString(final NamedNull namedNull) { - return NAMEDNULL_IDENTIFIER + namedNull.getName(); - } - - /** - * Creates a String representation of a given {@link Predicate}. - * - * @see Rule - * syntax - * @param predicate a {@link Predicate} - * @return String representation corresponding to a given {@link Predicate}. - */ - public static String getString(final Predicate predicate) { - return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; - } - - /** - * Creates a String representation of a given {@link DataSourceDeclaration}. - * - * @see Rule - * syntax - * @param dataSourceDeclaration a {@link DataSourceDeclaration} - * @return String representation corresponding to a given - * {@link DataSourceDeclaration}. 
- */ - public static String getString(final DataSourceDeclaration dataSourceDeclaration) { - return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR; - // + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + - // STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link CsvFileDataSource}. - * - * @see Rule - * syntax - * - * @param csvFileDataSource - * @return String representation corresponding to a given - * {@link CsvFileDataSource}. - */ - public static String getString(final CsvFileDataSource csvFileDataSource) { - return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given {@link RdfFileDataSource}. - * - * @see Rule - * syntax - * - * - * @param rdfFileDataSource - * @return String representation corresponding to a given - * {@link RdfFileDataSource}. - */ - public static String getString(final RdfFileDataSource rdfFileDataSource) { - return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given - * {@link SparqlQueryResultDataSource}. - * - * @see Rule - * syntax - * - * - * @param dataSource - * @return String representation corresponding to a given - * {@link SparqlQueryResultDataSource}. - */ - public static String getString(final SparqlQueryResultDataSource dataSource) { - return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS - + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA - + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) - + CLOSING_PARENTHESIS; - } - - private static String getFileString(final FileDataSource fileDataSource) { - return getString(fileDataSource.getPath()); - } - - private static String getIRIString(final String string) { - return getIRIString(string, Function.identity()); - } - - private static String getIRIString(final String string, final Function iriTransformer) { - final String transformed = iriTransformer.apply(string); - - if (!transformed.equals(string)) { - return transformed; - } - - if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) - || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { - return addAngleBrackets(string); - } - - return string; - } - - /** - * Constructs the parseable, serialized representation of given {@code string}. - * Escapes (with {@code \}) special character occurrences in given - * {@code string}, and surrounds the result with double quotation marks - * ({@code "}). The special characters are: - *
      - *
- * • {@code \}
- * • {@code "}
- * • {@code \t}
- * • {@code \b}
- * • {@code \n}
- * • {@code \r}
- * • {@code \f}
    - * Example for {@code string = "\\a"}, the returned value is - * {@code string = "\"\\\\a\""} - * - * @param string - * @return an escaped string surrounded by {@code "}. - */ - public static String getString(final String string) { - return addQuotes(escape(string)); - } - - /** - * Escapes (with {@code \}) special character occurrences in given - * {@code string}. The special characters are: - *
      - *
- * • {@code \}
- * • {@code "}
- * • {@code \t}
- * • {@code \b}
- * • {@code \n}
- * • {@code \r}
- * • {@code \f}
    - * - * @param string - * @return an escaped string - */ - private static String escape(final String string) { - return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") - .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); - // don't touch single quotes here since we only construct double-quoted strings - } - - private static String addQuotes(final String string) { - return QUOTE + string + QUOTE; - } - - private static String addAngleBrackets(final String string) { - return LESS_THAN + string + MORE_THAN; - } - - public static String getFactString(final Predicate predicate, final List terms) { - return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getFactString(final Predicate predicate, final List terms, - final Function iriTransformer) { - return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getString(final Predicate predicate, final List terms) { - return getString(predicate, terms, Function.identity()); - } - - public static String getString(final Predicate predicate, final List terms, - final Function iriTransformer) { - final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); - stringBuilder.append(OPENING_PARENTHESIS); - - boolean first = true; - for (final Term term : terms) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - final String string = term.getName();// term.getSyntacticRepresentation(iriTransformer); - stringBuilder.append(string); - } - stringBuilder.append(CLOSING_PARENTHESIS); - return stringBuilder.toString(); - } - - public static String getBaseString(final KnowledgeBase knowledgeBase) { - final String baseIri = knowledgeBase.getBaseIri(); - - return baseIri.equals(PrefixDeclarationRegistry.EMPTY_BASE) ? 
baseIri : getBaseDeclarationString(baseIri); - } - - private static String getBaseDeclarationString(final String baseIri) { - return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getPrefixString(final Entry prefix) { - return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getBaseAndPrefixDeclarations(final KnowledgeBase knowledgeBase) { - final StringBuilder sb = new StringBuilder(); - - sb.append(getBaseString(knowledgeBase)); - knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); - - return sb.toString(); - } - - public static String getCommandName(final String commandName) { - return AT + commandName; - } -} From 4e6d581e0e048b6fb76779f37369c100cd91ac57 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 09:20:45 +0200 Subject: [PATCH 0705/1003] fix Command serialization --- .../core/model/implementation/Serializer.java | 16 +++++++++++++--- .../rulewerk/core/model/SerializerTest.java | 19 +++++++++++++++++-- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 0c1e41d58..740200569 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -290,10 +290,20 @@ public void writeFact(Fact fact) throws IOException { * @throws IOException */ public void writeRule(Rule rule) throws IOException { + writeRuleNoStatment(rule); + writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link Rule} without the final dot. 
+ * + * @param rule a {@link Rule} + * @throws IOException + */ + private void writeRuleNoStatment(Rule rule) throws IOException { writeLiteralConjunction(rule.getHead()); writer.write(" :- "); writeLiteralConjunction(rule.getBody()); - writer.write(STATEMENT_END); } /** @@ -510,11 +520,11 @@ public void writeCommand(Command command) throws IOException { for (Argument argument : command.getArguments()) { writer.write(" "); if (argument.fromRule().isPresent()) { - writeRule(argument.fromRule().get()); + writeRuleNoStatment(argument.fromRule().get()); } else if (argument.fromPositiveLiteral().isPresent()) { writeLiteral(argument.fromPositiveLiteral().get()); } else { - throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); + writeTerm(argument.fromTerm().get()); } } writer.write(STATEMENT_END); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index 9ecff982a..332524344 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -26,10 +26,13 @@ import java.io.StringWriter; import java.io.Writer; import java.net.URL; +import java.util.ArrayList; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Literal; @@ -167,10 +170,10 @@ public void serializePositiveLiteral() throws IOException { serializer.writeLiteral(l1); assertEquals("p1(?X)", writer.toString()); } - + @Test public void serializePositiveLiteralFromTerms() throws IOException { - serializer.writePositiveLiteral(l1.getPredicate(),l1.getArguments()); + serializer.writePositiveLiteral(l1.getPredicate(), l1.getArguments()); assertEquals("p1(?X)", writer.toString()); } @@ -211,6 +214,18 @@ public void serializePrefixDeclarations() throws IOException { assertEquals("@base .\n@prefix eg: .\n", writer.toString()); } + @Test + public void serializeCommand() throws IOException { + ArrayList arguments = new ArrayList<>(); + arguments.add(Argument.term(abstractConstant)); + arguments.add(Argument.positiveLiteral(fact)); + arguments.add(Argument.rule(rule)); + Command command = new Command("command", arguments); + + serializer.writeCommand(command); + assertEquals("@command p1(c) p1(?X) :- p2(?X, c), p1(c) .", writer.toString()); + } + @Test public void createThrowingSerializer_succeeds() throws IOException { getThrowingSerializer(); From 12c9b9248509736089ee08245170e4ec2367ee5e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 09:38:52 +0200 Subject: [PATCH 0706/1003] nicer serialisation of KB --- .../core/model/implementation/Serializer.java | 11 ++++++++-- .../rulewerk/core/reasoner/KnowledgeBase.java | 16 +++++++++++++- .../rulewerk/core/model/SerializerTest.java | 21 ++++++++++++++++++- .../core/reasoner/KnowledgeBaseTest.java | 2 +- 4 files changed, 45 insertions(+), 5 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 740200569..810b85a0e 100644 --- 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -467,12 +467,16 @@ public void writeNamedNull(NamedNull namedNull) throws IOException { } /** - * Writes a serialization of the given {@link PrefixDeclarationRegistry}. + * Writes a serialization of the given {@link PrefixDeclarationRegistry}, and + * returns true if anything has been written. * * @param prefixDeclarationRegistry a {@link PrefixDeclarationRegistry} * @throws IOException + * @return true if anything has been written */ - public void writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) throws IOException { + public boolean writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) + throws IOException { + boolean result = false; final String baseIri = prefixDeclarationRegistry.getBaseIri(); if (!PrefixDeclarationRegistry.EMPTY_BASE.contentEquals(baseIri)) { writer.write("@base <"); @@ -480,6 +484,7 @@ public void writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDecla writer.write(">"); writer.write(STATEMENT_END); writer.write("\n"); + result = true; } Iterator> prefixIterator = prefixDeclarationRegistry.iterator(); @@ -492,7 +497,9 @@ public void writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDecla writer.write(">"); writer.write(STATEMENT_END); writer.write("\n"); + result = true; } + return result; } /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 698683e22..fda90958d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -591,19 +591,33 @@ public String unresolveAbsoluteIri(String iri) { public void writeKnowledgeBase(Writer writer) throws IOException { Serializer serializer = new Serializer(writer, prefixDeclarationRegistry); - serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); + boolean makeSeperator = serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); for (DataSourceDeclaration dataSourceDeclaration : this.getDataSourceDeclarations()) { + if (makeSeperator) { + writer.write('\n'); + makeSeperator = false; + } serializer.writeDataSourceDeclaration(dataSourceDeclaration); writer.write('\n'); } + makeSeperator |= !this.getDataSourceDeclarations().isEmpty(); for (Fact fact : this.getFacts()) { + if (makeSeperator) { + writer.write('\n'); + makeSeperator = false; + } serializer.writeFact(fact); writer.write('\n'); } + makeSeperator |= !this.getFacts().isEmpty(); for (Rule rule : this.getRules()) { + if (makeSeperator) { + writer.write('\n'); + makeSeperator = false; + } serializer.writeRule(rule); writer.write('\n'); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index 332524344..f495de508 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -210,8 +210,19 @@ public void serializePrefixDeclarations() throws IOException { prefixes.setPrefixIri("eg:", "http://example.org/"); Serializer prefSerializer = 
new Serializer(writer, prefixes); - prefSerializer.writePrefixDeclarationRegistry(prefixes); + boolean result = prefSerializer.writePrefixDeclarationRegistry(prefixes); assertEquals("@base .\n@prefix eg: .\n", writer.toString()); + assertTrue(result); + } + + @Test + public void serializeEmptyPrefixDeclarations() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + Serializer prefSerializer = new Serializer(writer, prefixes); + + boolean result = prefSerializer.writePrefixDeclarationRegistry(prefixes); + assertEquals("", writer.toString()); + assertFalse(result); } @Test @@ -277,4 +288,12 @@ public void serializeDataSourceDeclaration_fails() throws IOException { getThrowingSerializer().writeStatement(csvSourceDecl); } + @Test(expected = IOException.class) + public void serializePrefixDeclarations_fails() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + prefixes.setBaseIri("http://example.org/base"); + prefixes.setPrefixIri("eg:", "http://example.org/"); + getThrowingSerializer().writePrefixDeclarationRegistry(prefixes); + } + } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index ea3254874..6fa079bce 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -163,6 +163,6 @@ public void writeKnowledgeBase_alsoRuleAndDataSource_succeeds() throws IOExcepti StringWriter writer = new StringWriter(); this.kb.writeKnowledgeBase(writer); assertEquals("@source S[1]: sparql(<" + sparqlIri + ">, \"?X\", \"" + sparqlBgp - + "\") .\nP(c) .\nP(d) .\nQ(c) .\nP(?X) :- Q(?X) .\n", writer.toString()); + + "\") .\n\nP(c) .\nP(d) .\nQ(c) .\n\nP(?X) :- Q(?X) .\n", writer.toString()); } } From 148bc8df245a3788aea3366fe46dbaab72d757ff Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 10:03:02 +0200 Subject: [PATCH 0707/1003] pretty print query results --- .../commands/QueryCommandInterpreter.java | 10 ++- .../reasoner/LiteralQueryResultPrinter.java | 85 +++++++++++++++++++ 2 files changed, 94 insertions(+), 1 deletion(-) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index 5bead8d3f..4637e6cca 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.commands; +import java.io.IOException; + /*- * #%L * Rulewerk Core Components @@ -28,6 +30,7 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.LiteralQueryResultPrinter; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Timer; @@ -59,17 +62,22 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new 
CommandExecutionException("Unrecognized arguments"); } + LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(literal, interpreter.getWriter(), + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); + Timer timer = new Timer("query"); timer.start(); try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(literal, true)) { int count = 0; while (count != limit && answers.hasNext()) { - interpreter.getWriter().println(" " + answers.next()); + printer.write(answers.next()); count++; } timer.stop(); interpreter.getWriter().println(count + " result(s) in " + timer.getTotalCpuTime() / 1000000 + "ms. Results are " + answers.getCorrectness() + "."); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java new file mode 100644 index 000000000..0335af239 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java @@ -0,0 +1,85 @@ +package org.semanticweb.rulewerk.core.reasoner; + +import java.io.IOException; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.Writer; +import java.util.LinkedHashMap; +import java.util.Map.Entry; + +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + +/** + * Class for writing {@link QueryResult} objects in pretty print. 
+ * + * @author Markus Kroetzsch + * + */ +public class LiteralQueryResultPrinter { + + final LinkedHashMap firstIndex = new LinkedHashMap<>(); + final PrefixDeclarationRegistry prefixDeclarationRegistry; + final Writer writer; + final Serializer serializer; + + public LiteralQueryResultPrinter(PositiveLiteral positiveLiteral, Writer writer, + PrefixDeclarationRegistry prefixDeclarationRegistry) { + this.writer = writer; + this.serializer = new Serializer(writer, prefixDeclarationRegistry); + this.prefixDeclarationRegistry = prefixDeclarationRegistry; + + int i = 0; + for (Term term : positiveLiteral.getArguments()) { + if (term.getType() == TermType.UNIVERSAL_VARIABLE) { + UniversalVariable variable = (UniversalVariable) term; + if (!firstIndex.containsKey(variable)) { + firstIndex.put(variable, i); + } + } + i++; + } + } + + public void write(QueryResult queryResult) throws IOException { + boolean first = true; + for (Entry entry : firstIndex.entrySet()) { + if (first) { + first = false; + } else { + writer.write(", "); + } + serializer.writeUniversalVariable(entry.getKey()); + writer.write(" -> "); + serializer.writeTerm(queryResult.getTerms().get(entry.getValue())); + } + if (first) { + writer.write("true"); + } + writer.write("\n"); + } +} From 9f16e30d2af281a64c7f1ba8225e074de5f6e413 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 10:33:05 +0200 Subject: [PATCH 0708/1003] better display of Boolean query results --- .../commands/QueryCommandInterpreter.java | 15 +++-- .../reasoner/LiteralQueryResultPrinter.java | 61 +++++++++++++++++-- 2 files changed, 65 insertions(+), 11 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index 4637e6cca..fa6cb3f7a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -68,14 +68,19 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio Timer timer = new Timer("query"); timer.start(); try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(literal, true)) { - int count = 0; - while (count != limit && answers.hasNext()) { + while (printer.getResultCount() != limit && answers.hasNext()) { printer.write(answers.next()); - count++; } timer.stop(); - interpreter.getWriter().println(count + " result(s) in " + timer.getTotalCpuTime() / 1000000 - + "ms. Results are " + answers.getCorrectness() + "."); + + if (printer.isBooleanQuery()) { + interpreter.printEmph(printer.hadResults() ? 
"true\n" : "false\n"); + interpreter.printNormal("Answered in " + timer.getTotalCpuTime() / 1000000 + "ms."); + } else { + interpreter.printNormal( + printer.getResultCount() + " result(s) in " + timer.getTotalCpuTime() / 1000000 + "ms."); + } + interpreter.printNormal(" Results are " + answers.getCorrectness() + ".\n"); } catch (IOException e) { throw new CommandExecutionException(e.getMessage(), e); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java index 0335af239..129423051 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java @@ -43,15 +43,28 @@ public class LiteralQueryResultPrinter { final LinkedHashMap firstIndex = new LinkedHashMap<>(); - final PrefixDeclarationRegistry prefixDeclarationRegistry; final Writer writer; final Serializer serializer; + int resultCount = 0; + + /** + * Constructor. + * + * @param positiveLiteral the query pattern for which query results + * are to be printed + * @param writer the object to write the output to + * @param prefixDeclarationRegistry information on prefixes used to compute IRI + * abbreviations; can be null + */ public LiteralQueryResultPrinter(PositiveLiteral positiveLiteral, Writer writer, PrefixDeclarationRegistry prefixDeclarationRegistry) { this.writer = writer; - this.serializer = new Serializer(writer, prefixDeclarationRegistry); - this.prefixDeclarationRegistry = prefixDeclarationRegistry; + if (prefixDeclarationRegistry == null) { + this.serializer = new Serializer(writer); + } else { + this.serializer = new Serializer(writer, prefixDeclarationRegistry); + } int i = 0; for (Term term : positiveLiteral.getArguments()) { @@ -65,6 +78,14 @@ public LiteralQueryResultPrinter(PositiveLiteral positiveLiteral, Writer writer, } } + /** + * Writes a {@link QueryResult} to the specified writer. Nothing is written for + * results of Boolean queries (not even a linebreak). + * + * @param queryResult the {@link QueryResult} to write; this result must be + * based on the query literal specified in the constructor + * @throws IOException if a problem occurred in writing + */ public void write(QueryResult queryResult) throws IOException { boolean first = true; for (Entry entry : firstIndex.entrySet()) { @@ -77,9 +98,37 @@ public void write(QueryResult queryResult) throws IOException { writer.write(" -> "); serializer.writeTerm(queryResult.getTerms().get(entry.getValue())); } - if (first) { - writer.write("true"); + resultCount++; + if (!first) { + writer.write("\n"); } - writer.write("\n"); } + + /** + * Returns the number of results written so far. + * + * @return number of results + */ + public int getResultCount() { + return resultCount; + } + + /** + * Returns true if the query has had any results. + * + * @return true if query result is not empty + */ + public boolean hadResults() { + return resultCount != 0; + } + + /** + * Returns true if the query is boolean, i.e., has no answer variables. + * + * @return true if query is boolean + */ + public boolean isBooleanQuery() { + return firstIndex.size() == 0; + } + } From 7a9c6d180f2cb1c4f7ca3d587c02e9dfa81da5dc Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 10:54:22 +0200 Subject: [PATCH 0709/1003] add space before final . 
this prevents some parse errors in the current code (which should eventually be fixed in the parser) --- .../org/semanticweb/rulewerk/client/shell/CommandReader.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index f3aeb72e6..48a55eeea 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -71,7 +71,7 @@ public Command readCommand() { readLine = "@" + readLine; } if (readLine.charAt(readLine.length() - 1) != '.') { - readLine = readLine + "."; + readLine = readLine + " ."; } try { From 39b1e72178d64a9c461cda589ef61363baba8f8e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 11:23:17 +0200 Subject: [PATCH 0710/1003] support COUNT and csv export --- .../commands/QueryCommandInterpreter.java | 130 ++++++++++++++---- 1 file changed, 105 insertions(+), 25 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index fa6cb3f7a..2589ec056 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -30,44 +30,117 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; import org.semanticweb.rulewerk.core.reasoner.LiteralQueryResultPrinter; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Timer; public class QueryCommandInterpreter implements CommandInterpreter { public static Term KEYWORD_LIMIT = Expressions.makeAbstractConstant("LIMIT"); + public static Term KEYWORD_COUNT = Expressions.makeAbstractConstant("COUNT"); + public static Term KEYWORD_TOFILE = Expressions.makeAbstractConstant("EXPORTCSV"); + + private PositiveLiteral queryLiteral; + private int limit; + private boolean doCount; + private String csvFile; @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + processArguments(command.getArguments()); + + if (doCount) { + printCountQueryResults(interpreter); + } else if (csvFile == null) { + printQueryResults(interpreter); + } else { + exportQueryResults(interpreter); + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV ] .\n" + + " query literal: positive literal; may use ?queryVariables and ?existentialVariables\n" + + " limit: maximal number of results to be shown\n" + + " filename: string path to CSV file for exporting query results"; + } - List arguments = command.getArguments(); - PositiveLiteral literal; + @Override + public String getSynopsis() { + return "print or export query results"; + } + + private void processArguments(List arguments) throws CommandExecutionException { + int pos = 0; + limit = -1; + doCount = false; + csvFile = null; + + if (arguments.size() > 0 && 
KEYWORD_COUNT.equals(arguments.get(0).fromTerm().orElse(null))) { + doCount = true; + pos++; + } - if (arguments.size() > 0 && arguments.get(0).fromPositiveLiteral().isPresent()) { - literal = arguments.get(0).fromPositiveLiteral().get(); + if (arguments.size() > pos && arguments.get(pos).fromPositiveLiteral().isPresent()) { + queryLiteral = arguments.get(pos).fromPositiveLiteral().get(); + pos++; } else { - throw new CommandExecutionException("First argument must be a query literal."); + throw new CommandExecutionException("A query literal must be given."); } - int limit = -1; - if (arguments.size() == 3 && KEYWORD_LIMIT.equals(arguments.get(1).fromTerm().orElse(null)) - && arguments.get(2).fromTerm().isPresent()) { - try { - limit = Terms.extractInt(arguments.get(2).fromTerm().get()); - } catch (IllegalArgumentException e) { - throw new CommandExecutionException("Invalid limit given: " + arguments.get(3).fromTerm().get()); + while (arguments.size() > pos) { + if (arguments.size() > pos + 1 && KEYWORD_LIMIT.equals(arguments.get(pos).fromTerm().orElse(null)) + && arguments.get(pos + 1).fromTerm().isPresent()) { + try { + limit = Terms.extractInt(arguments.get(pos + 1).fromTerm().get()); + pos += 2; + } catch (IllegalArgumentException e) { + throw new CommandExecutionException( + "Invalid limit given: " + arguments.get(pos + 1).fromTerm().get()); + } + } else if (arguments.size() > pos + 1 && KEYWORD_TOFILE.equals(arguments.get(pos).fromTerm().orElse(null)) + && arguments.get(pos + 1).fromTerm().isPresent()) { + try { + csvFile = Terms.extractString(arguments.get(pos + 1).fromTerm().get()); + pos += 2; + } catch (IllegalArgumentException e) { + throw new CommandExecutionException( + "Invalid filename given: " + arguments.get(pos + 1).fromTerm().get()); + } + } else { + throw new CommandExecutionException("Unrecognized arguments"); } - } else if (arguments.size() != 1) { - throw new CommandExecutionException("Unrecognized arguments"); } + } - LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(literal, interpreter.getWriter(), + private void printCountQueryResults(Interpreter interpreter) throws CommandExecutionException { + if (limit != -1) { + throw new CommandExecutionException("LIMIT not supported with COUNT"); + } + if (csvFile != null) { + throw new CommandExecutionException("COUNT results cannot be exported to CSV"); + } + + Timer timer = new Timer("query"); + timer.start(); + QueryAnswerCount count = interpreter.getReasoner().countQueryAnswers(queryLiteral); + timer.stop(); + + interpreter.printNormal(String.valueOf(count.getCount()) + "\n"); + interpreter.printNormal("Answered in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.printNormal(" This result is " + count.getCorrectness() + ".\n"); + } + + private void printQueryResults(Interpreter interpreter) throws CommandExecutionException { + LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(queryLiteral, interpreter.getWriter(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); Timer timer = new Timer("query"); timer.start(); - try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(literal, true)) { + try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(queryLiteral, true)) { while (printer.getResultCount() != limit && answers.hasNext()) { printer.write(answers.next()); } @@ -86,15 +159,22 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - @Override - public String getHelp(String 
commandName) { - return "Usage: @" + commandName + " [LIMIT ] .\n" - + " query literal: positive literal; may use ?queryVariables and ?existentialVariables\n" - + " limit: maximal number of results to be shown"; - } + private void exportQueryResults(Interpreter interpreter) throws CommandExecutionException { + if (limit != -1) { + throw new CommandExecutionException("LIMIT not supported for CSV export"); + } - @Override - public String getSynopsis() { - return "print results to queries"; + Timer timer = new Timer("query"); + timer.start(); + Correctness correctness; + try { + correctness = interpreter.getReasoner().exportQueryAnswersToCsv(queryLiteral, csvFile, true); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + timer.stop(); + + interpreter.printNormal("Written query result file in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.printNormal(" This result is " + correctness + ".\n"); } } From f83768fc6df73a1648695ae00983c674a19c9f1d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 11:25:25 +0200 Subject: [PATCH 0711/1003] correct help message --- .../semanticweb/rulewerk/commands/QueryCommandInterpreter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index 2589ec056..63fb5bea9 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -63,7 +63,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio @Override public String getHelp(String commandName) { return "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV ] .\n" - + " query literal: positive literal; may use ?queryVariables and ?existentialVariables\n" + + " query literal: positive literal, possibly with ?queryVariables\n" + " limit: maximal number of results to be shown\n" + " filename: string path to CSV file for exporting query results"; } From 75040d32a42717ee0f189ec197a5658465084490 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 12:17:09 +0200 Subject: [PATCH 0712/1003] improved messages --- .../commands/LoadCommandInterpreter.java | 32 ++++++------------- .../commands/ReasonCommandInterpreter.java | 2 +- 2 files changed, 11 insertions(+), 23 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 7b12e755e..c615bd08a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -24,7 +24,6 @@ import java.io.FileNotFoundException; import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; @@ -32,28 +31,17 @@ public class LoadCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - if (command.getArguments().size() == 1) { - String fileName; - try { - fileName = 
Terms.extractString( - command.getArguments().get(0).fromTerm().orElseThrow(() -> new CommandExecutionException( - "Expected string for file name, but did not find a term."))); - } catch (IllegalArgumentException e) { - throw new CommandExecutionException("Failed to convert term given for file name to string."); - } - try { - FileInputStream fileInputStream = new FileInputStream(fileName); - RuleParser.parseInto(interpreter.getKnowledgeBase(), fileInputStream); - } catch (FileNotFoundException e) { - throw new CommandExecutionException(e.getMessage(), e); - } catch (ParsingException e) { - interpreter.getWriter().println("Error parsing file: " + e.getMessage()); - } - - } else { - throw new CommandExecutionException(getHelp(command.getName())); + Interpreter.validateArgumentCount(command, 1); + String fileName = Interpreter.extractStringArgument(command, 0, "filename"); + + try { + FileInputStream fileInputStream = new FileInputStream(fileName); + RuleParser.parseInto(interpreter.getKnowledgeBase(), fileInputStream); + } catch (FileNotFoundException e) { + throw new CommandExecutionException(e.getMessage(), e); + } catch (ParsingException e) { + interpreter.getWriter().println("Error parsing file: " + e.getMessage()); } - } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index fe0b9f580..59927008c 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -45,7 +45,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException(e.getMessage(), e); } timer.stop(); - interpreter.getWriter().println("... finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.getWriter().println("... finished in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); } @Override From 0be1d8a0404e05b41ecbb7bcbf3146d441f8b4af Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 12:17:38 +0200 Subject: [PATCH 0713/1003] support exporting infernces and kbs --- .../commands/ExportCommandInterpreter.java | 95 +++++++++++++++++++ .../rulewerk/commands/Interpreter.java | 8 +- 2 files changed, 100 insertions(+), 3 deletions(-) create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java new file mode 100644 index 000000000..b125893a0 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -0,0 +1,95 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.charset.StandardCharsets; + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.Timer; + +public class ExportCommandInterpreter implements CommandInterpreter { + + static final String TASK_KB = "KB"; + static final String TASK_INFERENCES = "INFERENCES"; + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 2); + + String task = Interpreter.extractNameArgument(command, 0, "task"); + String fileName = Interpreter.extractStringArgument(command, 1, "filename"); + + if (TASK_KB.equals(task)) { + exportKb(interpreter, fileName); + } else if (TASK_INFERENCES.equals(task)) { + exportInferences(interpreter, fileName); + } else { + throw new CommandExecutionException( + "Unknown task " + task + ". Should be " + TASK_KB + " or " + TASK_INFERENCES); + } + + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " TASK \"filename\" .\n" // + + " TASK: what to export; can be KB or INFERENCES\n" // + + " \"filename\": string path export file (suggested extension: .rls)"; + } + + @Override + public String getSynopsis() { + return "export knowledgebase or inferences to a Rulewerk file"; + } + + private void exportInferences(Interpreter interpreter, String fileName) throws CommandExecutionException { + Timer timer = new Timer("export"); + Correctness correctness; + try (Writer writer = new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)) { + timer.start(); + correctness = interpreter.getReasoner().writeInferences(writer); + timer.stop(); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + + interpreter.printNormal("Exported all inferences in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); + interpreter.printNormal(" This result is " + correctness + ".\n"); + } + + private void exportKb(Interpreter interpreter, String fileName) throws CommandExecutionException { + Timer timer = new Timer("export"); + try (Writer writer = new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)) { + timer.start(); + interpreter.getKnowledgeBase().writeKnowledgeBase(writer); + timer.stop(); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + interpreter.printNormal("Exported knowledge base in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time).\n"); + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 724b61271..bf7afefbe 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -50,7 +50,8 @@ public class Interpreter { final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); - public Interpreter(final Reasoner reasoner, final StyledPrinter printer, final ParserConfiguration parserConfiguration) { + public Interpreter(final Reasoner reasoner, final StyledPrinter printer, + final ParserConfiguration parserConfiguration) { this.reasoner = reasoner; this.printer = printer; this.parserConfiguration = parserConfiguration; @@ -154,6 +155,7 @@ private void registerDefaultCommandInterpreters() { this.registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); this.registerCommandInterpreter("reason", new ReasonCommandInterpreter()); this.registerCommandInterpreter("query", new QueryCommandInterpreter()); + this.registerCommandInterpreter("export", new ExportCommandInterpreter()); this.registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); } @@ -197,8 +199,8 @@ public static String extractNameArgument(final Command command, final int index, } } - public static PositiveLiteral extractPositiveLiteralArgument(final Command command, final int index, final String parameterName) - throws CommandExecutionException { + public static PositiveLiteral extractPositiveLiteralArgument(final Command command, final int index, + final String parameterName) throws CommandExecutionException { return command.getArguments().get(index).fromPositiveLiteral() .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); } From 5fde93ea8b5fd54b223cd2fa0e938b818b002e43 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 14:01:49 +0200 Subject: [PATCH 0714/1003] Use Writer rather than PrintWriter --- .../rulewerk/client/shell/Shell.java | 5 ++- .../client/shell/TerminalStyledPrinter.java | 1 + .../commands/AssertCommandInterpreter.java | 2 +- .../rulewerk/commands/Interpreter.java | 4 +-- .../commands/LoadCommandInterpreter.java | 2 +- .../commands/ReasonCommandInterpreter.java | 6 ++-- .../RemoveSourceCommandInterpreter.java | 6 ++-- .../commands/RetractCommandInterpreter.java | 2 +- .../commands/SimpleStyledPrinter.java | 32 ++++++++++++------- .../rulewerk/commands/StyledPrinter.java | 16 +++++----- 10 files changed, 43 insertions(+), 33 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 9a2cd08d0..a36caba3c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -51,7 +51,7 @@ public void run(final CommandReader commandReader) { try { command = commandReader.readCommand(); } catch (final Exception e) { - interpreter.getWriter().println("Unexpected error: " + e.getMessage()); + interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); e.printStackTrace(); continue; } @@ -60,12 +60,11 @@ public void run(final CommandReader commandReader) { try { this.interpreter.runCommand(command); } catch (final CommandExecutionException e) { - interpreter.getWriter().println("Error: " + e.getMessage()); + interpreter.printNormal("Error: " + e.getMessage() + "\n"); } } } interpreter.printSection("Existing Rulewerk shell ... 
bye.\n\n"); - interpreter.getWriter().flush(); } public void exitShell() { diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java index 8e77422f5..d481f2c3a 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java @@ -68,5 +68,6 @@ public PrintWriter getWriter() { private void printStyled(String string, AttributedStyle attributedStyle) { AttributedString attributedString = new AttributedString(string, attributedStyle); getWriter().print(attributedString.toAnsi(terminal)); + getWriter().flush(); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index 99e1c90e7..2e5602cd0 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -53,7 +53,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - interpreter.getWriter().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); + interpreter.printNormal("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s).\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index bf7afefbe..41756b5ea 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -2,7 +2,7 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; -import java.io.PrintWriter; +import java.io.Writer; /*- * #%L @@ -121,7 +121,7 @@ public ParserConfiguration getParserConfiguration() { return this.parserConfiguration; } - public PrintWriter getWriter() { + public Writer getWriter() { return this.printer.getWriter(); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index c615bd08a..403cea74d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -40,7 +40,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } catch (FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); } catch (ParsingException e) { - interpreter.getWriter().println("Error parsing file: " + e.getMessage()); + interpreter.printNormal("Error parsing file: " + e.getMessage() + "\n"); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index 59927008c..4b53cecda 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -34,8 +34,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException("This command supports no arguments."); } - interpreter.getWriter().println("Loading and materializing inferences ..."); - interpreter.getWriter().flush(); + interpreter.printNormal("Loading and materializing inferences ...\n"); Timer timer = new Timer("reasoning"); timer.start(); @@ -45,7 +44,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException(e.getMessage(), e); } timer.stop(); - interpreter.getWriter().println("... finished in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); + interpreter.printNormal("... finished in " + timer.getTotalWallTime() / 1000000 + "ms (" + + timer.getTotalCpuTime() / 1000000 + "ms CPU time).\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java index 8da1513a8..afa1d2fa8 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -47,9 +47,9 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (dataSource != null) { DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); if (interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration) > 0) { - interpreter.getWriter().println("Removed specified data source declaration."); + interpreter.printNormal("Removed specified data source declaration.\n"); } else { - interpreter.getWriter().println("Specified data source declaration not found in knowledge base."); + interpreter.printNormal("Specified data source declaration not found in knowledge base.\n"); } } else { int count = 0; @@ -60,7 +60,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio count++; } } - interpreter.getWriter().println("Removed " + count + " matching data source declaration(s)."); + interpreter.printNormal("Removed " + count + " matching data source declaration(s).\n"); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java index 5680ae27b..c44ae848b 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -50,7 +50,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - interpreter.getWriter().println("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); + interpreter.printNormal("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s).\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java index 3c91a2218..1ba22dfe0 100644 --- 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.commands; +import java.io.IOException; + /*- * #%L * Rulewerk command execution support @@ -20,7 +22,7 @@ * #L% */ -import java.io.PrintWriter; +import java.io.Writer; /** * Simple implementation of {@link StyledPrinter} based on an arbitrary @@ -31,40 +33,48 @@ */ public class SimpleStyledPrinter implements StyledPrinter { - final PrintWriter printWriter; + final Writer writer; - public SimpleStyledPrinter(final PrintWriter printWriter) { - this.printWriter = printWriter; + public SimpleStyledPrinter(final Writer writer) { + this.writer = writer; } @Override public void printNormal(String string) { - printWriter.print(string); + write(string); } @Override public void printSection(String string) { - printWriter.print(string); + write(string); } @Override public void printEmph(String string) { - printWriter.print(string); + write(string); } @Override public void printCode(String string) { - printWriter.print(string); + write(string); } @Override public void printImportant(String string) { - printWriter.print(string); + write(string); } @Override - public PrintWriter getWriter() { - return printWriter; + public Writer getWriter() { + return writer; + } + + private void write(String string) { + try { + writer.write(string); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java index a3d73df78..8e642b597 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java @@ -20,20 +20,20 @@ * #L% */ -import java.io.PrintWriter; +import java.io.Writer; public interface StyledPrinter { - + void printNormal(String string); - + void printSection(String string); - + void printEmph(String string); - + void printCode(String string); - + void printImportant(String string); - - PrintWriter getWriter(); + + Writer getWriter(); } From 99ee054d95466329c0801b321e058c6043cadd06 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 15:04:12 +0200 Subject: [PATCH 0715/1003] change how command help is printed --- .../shell/commands/ExitCommandInterpreter.java | 14 +++++++------- .../commands/AddSourceCommandInterpreter.java | 6 +++--- .../commands/AssertCommandInterpreter.java | 6 +++--- .../rulewerk/commands/CommandInterpreter.java | 9 ++++----- .../commands/ExportCommandInterpreter.java | 12 +++++++----- .../rulewerk/commands/HelpCommandInterpreter.java | 9 +++++---- .../rulewerk/commands/LoadCommandInterpreter.java | 4 ++-- .../rulewerk/commands/QueryCommandInterpreter.java | 11 ++++++----- .../commands/ReasonCommandInterpreter.java | 4 ++-- .../commands/RemoveSourceCommandInterpreter.java | 6 +++--- .../commands/RetractCommandInterpreter.java | 6 +++--- .../commands/SetPrefixCommandInterpreter.java | 4 ++-- .../commands/ShowKbCommandInterpreter.java | 4 ++-- 13 files changed, 49 insertions(+), 46 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java 
index 5c2ac4c83..ea2645279 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -25,18 +25,18 @@ import org.semanticweb.rulewerk.client.shell.Shell; import org.semanticweb.rulewerk.commands.CommandExecutionException; import org.semanticweb.rulewerk.commands.CommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; public class ExitCommandInterpreter implements CommandInterpreter { public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>()); - public static enum ExitCommandName - { + public static enum ExitCommandName { exit; public static boolean isExitCommand(final String commandName) { - for(final ExitCommandName name: values()) { + for (final ExitCommandName name : values()) { if (name.toString().equals(commandName)) { return true; } @@ -44,16 +44,16 @@ public static boolean isExitCommand(final String commandName) { return false; } } - + final Shell shell; - + public ExitCommandInterpreter(Shell shell) { this.shell = shell; } @Override - public String getHelp(final String commandName) { - return "Usage: " + commandName + "."; + public void printHelp(final String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: " + commandName + ".\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index f0182e3dd..5fe8936cd 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -52,11 +52,11 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " []: .\n" + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " []: .\n" + " [] : the name of the predicate and its arity\n" + " : a fact specifying a source declaration\n\n" - + "Note that every predicate can have multiple sources."; + + "Note that every predicate can have multiple sources.\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index 2e5602cd0..bf194e079 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -57,10 +57,10 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " ()+ .\n" + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " ()+ .\n" + " fact or rule: statement(s) to be added to the knowledge base\n" - + "Reasoning needs to be invoked after finishing addition of statements."; + + "Reasoning needs to be invoked after finishing addition of statements.\n"); } @Override 
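The change in this patch replaces CommandInterpreter.getHelp(String), which returned a help string, with printHelp(String, Interpreter), which writes the help text directly through the interpreter's printer and ends it with a newline. A minimal sketch of a custom command following the new contract is shown below; the class name EchoCommandInterpreter and the behaviour it implements are hypothetical and only illustrate the interface as changed by this patch, they are not part of the patch itself.

    import org.semanticweb.rulewerk.core.model.api.Command;
    import org.semanticweb.rulewerk.commands.CommandExecutionException;
    import org.semanticweb.rulewerk.commands.CommandInterpreter;
    import org.semanticweb.rulewerk.commands.Interpreter;

    /** Hypothetical command that echoes its arguments, illustrating the revised interface. */
    public class EchoCommandInterpreter implements CommandInterpreter {

        @Override
        public void run(Command command, Interpreter interpreter) throws CommandExecutionException {
            // All user-visible output goes through the interpreter's styled printer.
            interpreter.printNormal("Echo: " + command.getArguments() + "\n");
        }

        @Override
        public void printHelp(String commandName, Interpreter interpreter) {
            // Help is now printed rather than returned; it starts with "Usage:" and ends with a newline.
            interpreter.printNormal("Usage: @" + commandName + " (argument)* .\n");
        }

        @Override
        public String getSynopsis() {
            return "print the given arguments back to the shell";
        }
    }

Such a command could then be registered alongside the built-in ones, analogous to the registerCommandInterpreter(...) calls in Interpreter.registerDefaultCommandInterpreters() shown earlier in this series.
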
diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java index 7959a3376..ff2c1f154 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java @@ -40,13 +40,12 @@ public interface CommandInterpreter { void run(Command command, Interpreter interpreter) throws CommandExecutionException; /** - * Return a text that describes command use and parameters, using the given + * Prints a text that describes command use and parameters, using the given * command name. The output should start with a "Usage:" line, followed by - * single-space-indented parameter descriptions. - * - * @return help message + * single-space-indented parameter descriptions, and it should end with a + * newline. */ - String getHelp(String commandName); + void printHelp(String commandName, Interpreter interpreter); /** * Returns a short line describing the purpose of the command. diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java index b125893a0..c6f518f13 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -54,10 +54,10 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " TASK \"filename\" .\n" // + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " TASK \"filename\" .\n" // + " TASK: what to export; can be KB or INFERENCES\n" // - + " \"filename\": string path export file (suggested extension: .rls)"; + + " \"filename\": string path export file (suggested extension: .rls)\n"); } @Override @@ -76,7 +76,8 @@ private void exportInferences(Interpreter interpreter, String fileName) throws C throw new CommandExecutionException(e.getMessage(), e); } - interpreter.printNormal("Exported all inferences in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); + interpreter.printNormal("Exported all inferences in " + timer.getTotalWallTime() / 1000000 + "ms (" + + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); interpreter.printNormal(" This result is " + correctness + ".\n"); } @@ -89,7 +90,8 @@ private void exportKb(Interpreter interpreter, String fileName) throws CommandEx } catch (IOException e) { throw new CommandExecutionException(e.getMessage(), e); } - interpreter.printNormal("Exported knowledge base in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time).\n"); + interpreter.printNormal("Exported knowledge base in " + timer.getTotalWallTime() / 1000000 + "ms (" + + timer.getTotalCpuTime() / 1000000 + "ms CPU time).\n"); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index 292a50b2a..431e25205 100644 --- 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -47,18 +47,19 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (interpreter.commandInterpreters.containsKey(helpCommand)) { interpreter.printCode("@" + helpCommand); interpreter.printNormal(": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis() + "\n"); - interpreter.printNormal(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand) + "\n"); + interpreter.commandInterpreters.get(helpCommand).printHelp(helpCommand, interpreter); } else { interpreter.printNormal("Command '" + helpCommand + "' not known.\n"); } } else { - interpreter.printNormal(getHelp(command.getName())); + printHelp(command.getName(), interpreter); } } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " [command name] .\n" + "\t command name: command to get detailed help for"; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal( + "Usage: @" + commandName + " [command name] .\n" + "\t command name: command to get detailed help for"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 403cea74d..feeea25c2 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -45,8 +45,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " \n" + " file: path to a Rulewerk rls file"; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " \n" + " file: path to a Rulewerk rls file\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index 63fb5bea9..310505656 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -61,11 +61,12 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV ] .\n" - + " query literal: positive literal, possibly with ?queryVariables\n" - + " limit: maximal number of results to be shown\n" - + " filename: string path to CSV file for exporting query results"; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal( + "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV ] .\n" + + " query literal: positive literal, possibly with ?queryVariables\n" + + " limit: maximal number of results to be shown\n" + + " filename: string path to CSV file for exporting query results\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index 4b53cecda..6d56aeee5 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -49,8 +49,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " ."; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " .\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java index afa1d2fa8..9a7c606a4 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -66,11 +66,11 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " []: .\n" + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " []: .\n" + " [] : the name of the predicate and its arity\n" + " (optional): a fact specifying a source declaration\n\n" - + "Note that every predicate can have multiple sources."; + + "Note that every predicate can have multiple sources.\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java index c44ae848b..9acdbb4a3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -54,10 +54,10 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " ()+ .\n" + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " ()+ .\n" + " fact or rule: statement(s) to be removed from the knowledge base\n" - + "Reasoning needs to be invoked after finishing the removal of statements."; + + "Reasoning needs to be invoked after finishing the removal of statements.\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java index ed31fb400..c713f7789 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java @@ -40,8 +40,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " : ."; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName 
+ " : .\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java index b854e0b4d..138f3ca48 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -37,8 +37,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + "."; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " .\n"); } @Override From 67a2b7b48d828af15b754b5f83a0aab3fc24938b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 16:01:09 +0200 Subject: [PATCH 0716/1003] remove unused constructor --- .../rulewerk/commands/CommandExecutionException.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java index 9b9a5c6b0..9d4fcce4b 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java @@ -29,10 +29,6 @@ public class CommandExecutionException extends RulewerkException { */ private static final long serialVersionUID = 1479091500621334935L; - public CommandExecutionException(Throwable cause) { - super(cause); - } - public CommandExecutionException(String message, Throwable cause) { super(message, cause); } From 11b6d32dd9916ec6e649640d9744897daffc214b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 17:17:23 +0200 Subject: [PATCH 0717/1003] fix documentation --- .../semanticweb/rulewerk/core/reasoner/KnowledgeBase.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index fda90958d..6936811e7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -25,7 +25,6 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.charset.StandardCharsets; @@ -581,9 +580,9 @@ public String unresolveAbsoluteIri(String iri) { } /** - * Serialise the KnowledgeBase to the {@link OutputStream}. + * Serialise the KnowledgeBase to the {@link Writer}. * - * @param writer the {@link OutputStream} to serialise to. + * @param writer the {@link Writer} to serialise to. 
* * @throws IOException if an I/O error occurs while writing to given output * stream From d71b4e362c28091a30bf45a73a46fa97b1dd3ef8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 18:26:52 +0200 Subject: [PATCH 0718/1003] extract method to open files for testing --- .../rulewerk/commands/ExportCommandInterpreter.java | 7 ++----- .../org/semanticweb/rulewerk/commands/Interpreter.java | 7 +++++++ 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java index c6f518f13..4ead3798d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -20,11 +20,8 @@ * #L% */ -import java.io.FileOutputStream; import java.io.IOException; -import java.io.OutputStreamWriter; import java.io.Writer; -import java.nio.charset.StandardCharsets; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.reasoner.Correctness; @@ -68,7 +65,7 @@ public String getSynopsis() { private void exportInferences(Interpreter interpreter, String fileName) throws CommandExecutionException { Timer timer = new Timer("export"); Correctness correctness; - try (Writer writer = new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)) { + try (Writer writer = interpreter.getFileWriter(fileName)) { timer.start(); correctness = interpreter.getReasoner().writeInferences(writer); timer.stop(); @@ -83,7 +80,7 @@ private void exportInferences(Interpreter interpreter, String fileName) throws C private void exportKb(Interpreter interpreter, String fileName) throws CommandExecutionException { Timer timer = new Timer("export"); - try (Writer writer = new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)) { + try (Writer writer = interpreter.getFileWriter(fileName)) { timer.start(); interpreter.getKnowledgeBase().writeKnowledgeBase(writer); timer.stop(); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 41756b5ea..d6dc0fecd 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -1,7 +1,10 @@ package org.semanticweb.rulewerk.commands; import java.io.ByteArrayInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; import java.io.InputStream; +import java.io.OutputStreamWriter; import java.io.Writer; /*- @@ -205,4 +208,8 @@ public static PositiveLiteral extractPositiveLiteralArgument(final Command comma .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); } + public Writer getFileWriter(String fileName) throws FileNotFoundException { + return new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8); + } + } From 1ba2d701aa8e81575f99ff25abc2d569e2245fa7 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 18:27:08 +0200 Subject: [PATCH 0719/1003] fix usage message --- .../semanticweb/rulewerk/commands/HelpCommandInterpreter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index 431e25205..7707137e2 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -58,8 +58,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal( - "Usage: @" + commandName + " [command name] .\n" + "\t command name: command to get detailed help for"); + interpreter.printNormal("Usage: @" + commandName + " [command name] .\n" // + + "\t command name: command to get detailed help for\n"); } @Override From 436d10e963b96906a0494e35c3a3a021666f84af Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 18:28:11 +0200 Subject: [PATCH 0720/1003] more unit tests --- .../AddSourceCommandInterpreterTest.java | 120 +++++++ .../AssertCommandInterpreterTest.java | 95 ++++++ .../ExportCommandInterpreterTest.java | 156 +++++++++ .../commands/HelpCommandInterpreterTest.java | 117 +++++++ .../rulewerk/commands/InterpreterTest.java | 103 ++++++ .../commands/QueryCommandInterpreterTest.java | 313 ++++++++++++++++++ .../ReasonCommandInterpreterTest.java | 85 +++++ .../RemoveSourceCommandInterpreterTest.java | 176 ++++++++++ .../RetractCommandInterpreterTest.java | 108 ++++++ .../ShowKbCommandInterpreterTest.java | 97 ++++++ 10 files changed, 1370 insertions(+) create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java new file mode 100644 index 000000000..e56c4ea3d --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java @@ -0,0 +1,120 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class AddSourceCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter + .parseCommand("@addsource p[1] : sparql(, \"?x\", \"?x

    \") ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals("addsource", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromPositiveLiteral().isPresent()); + + assertTrue(facts.isEmpty()); + assertTrue(rules.isEmpty()); + assertEquals(1, dataSourceDeclarations.size()); + assertTrue(dataSourceDeclarations.get(0).getDataSource() instanceof SparqlQueryResultDataSource); + } + + @Test(expected = CommandExecutionException.class) + public void wrongFirstArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@addsource \"string\" p(a)."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@addsource p[1]: \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentUnknownSource_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@addsource p[1]: unknown(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentWrongAritySource_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@addsource p[1]: load-rdf(\"file.nt\") ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@addsource p[2]: p(a) p(b) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new AddSourceCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new AddSourceCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java new file mode 100644 index 
000000000..0d6f01a78 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java @@ -0,0 +1,95 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class AssertCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@assert p(a) q(?X) :- r(?X) ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals("assert", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromPositiveLiteral().isPresent()); + assertTrue(command.getArguments().get(1).fromRule().isPresent()); + + Literal literal = command.getArguments().get(0).fromPositiveLiteral().get(); + Rule rule = command.getArguments().get(1).fromRule().get(); + + assertEquals(Arrays.asList(literal), facts); + assertEquals(Arrays.asList(rule), rules); + assertTrue(dataSourceDeclarations.isEmpty()); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@assert \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentNonFact_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@assert p(?X) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new AssertCommandInterpreter(); + 
InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new AssertCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java new file mode 100644 index 000000000..52207d1bf --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java @@ -0,0 +1,156 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ExportCommandInterpreterTest { + + @Test + public void correctUseKb_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + StringWriter fileWriter = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(fileWriter).when(interpreter).getFileWriter(Mockito.eq("test.rls")); + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + Command command = interpreter.parseCommand("@export KB \"test.rls\" ."); + interpreter.runCommand(command); + + StringWriter anotherWriter = new StringWriter(); + interpreter.getKnowledgeBase().writeKnowledgeBase(anotherWriter); + + assertEquals("export", command.getName()); + assertEquals(2, command.getArguments().size()); + assertEquals(anotherWriter.toString(), fileWriter.toString()); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseKbIoException_failse() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + 
Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileWriter(Mockito.eq("test.rls")); + + Command command = interpreter.parseCommand("@export KB \"test.rls\" ."); + interpreter.runCommand(command); + } + + @Test + public void correctUseInferences_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + StringWriter fileWriter = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(fileWriter).when(interpreter).getFileWriter(Mockito.eq("test.rls")); + Mockito.when(interpreter.getReasoner().writeInferences(Mockito.any(Writer.class))) + .thenReturn(Correctness.SOUND_BUT_INCOMPLETE); + + Command command = interpreter.parseCommand("@export INFERENCES \"test.rls\" ."); + interpreter.runCommand(command); + + assertEquals("export", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(writer.toString().contains(Correctness.SOUND_BUT_INCOMPLETE.toString())); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseInferencesIoException_fails() + throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileWriter(Mockito.eq("test.rls")); + + Command command = interpreter.parseCommand("@export INFERENCES \"test.rls\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void unknonwTask_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@export UNKNOWN \"file.csv\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongFirstArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@export \"string\" \"file.rls\"."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@export KB 123 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@export KB \"file.rls\" more ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new ExportCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, 
interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new ExportCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java new file mode 100644 index 000000000..8a89c1cea --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java @@ -0,0 +1,117 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class HelpCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help ."); + interpreter.runCommand(command); + + String output = writer.toString(); + for (String commandName : interpreter.getRegisteredCommands()) { + assertTrue(output.contains("@" + commandName)); + } + } + + @Test + public void correctUseWithCommand_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help query."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void wrongArgumentCount_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help query showkb ."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void unknownCommandHelp_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help unknowncommand ."); + interpreter.runCommand(command); + // Nothing much to test here. 
+ assertTrue(writer.toString().length() > 0); + } + + @Test + public void wrongArgumentTypeTerm_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help 123 ."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void wrongArgumentTypeFact_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help p(a) ."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new HelpCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new HelpCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java new file mode 100644 index 000000000..ffe2c5380 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java @@ -0,0 +1,103 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.io.Writer; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class InterpreterTest { + + static public Interpreter getMockInterpreter(Writer writer) { + SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + return new Interpreter(reasoner, printer, parserConfiguration); + } + + /** + * Checks the basic format of command usage instructions and verifies that the + * given command name is used (not a fixed one). + * + * @param commandInterpreter + * @param interpreter + * @param writer + */ + static public void checkHelpFormat(CommandInterpreter commandInterpreter, Interpreter interpreter, + StringWriter writer) { + commandInterpreter.printHelp("commandname", interpreter); + String result = writer.toString(); + + assertTrue(result.startsWith("Usage: @commandname ")); + assertTrue(result.endsWith("\n")); + } + + static public void checkSynopsisFormat(CommandInterpreter commandInterpreter) { + String synopsis = commandInterpreter.getSynopsis(); + assertTrue(synopsis.length() < 70); + } + + @Test + public void getters_succeed() { + StringWriter writer = new StringWriter(); + SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + Interpreter interpreter = new Interpreter(reasoner, printer, parserConfiguration); + + assertEquals(knowledgeBase, interpreter.getKnowledgeBase()); + assertEquals(reasoner, interpreter.getReasoner()); + assertEquals(writer, interpreter.getWriter()); + assertEquals(parserConfiguration, interpreter.getParserConfiguration()); + } + + @Test(expected = CommandExecutionException.class) + public void unknownCommand_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@unknown ."); + interpreter.runCommand(command); + } + + @Test(expected = ParsingException.class) + public void malformedCommand_fails() throws ParsingException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = getMockInterpreter(writer); + + interpreter.parseCommand("malformed ."); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java new file mode 100644 index 000000000..a23f72b13 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java @@ -0,0 +1,313 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk 
command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class QueryCommandInterpreterTest { + + class TestQueryResultIterator implements QueryResultIterator { + + final Iterator results; + + public TestQueryResultIterator(List results) { + this.results = results.iterator(); + } + + @Override + public boolean hasNext() { + return results.hasNext(); + } + + @Override + public QueryResult next() { + return results.next(); + } + + @Override + public Correctness getCorrectness() { + return Correctness.SOUND_AND_COMPLETE; + } + + @Override + public void close() { + } + + } + + @Test + public void correctUseQuery_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + QueryResult r1 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-1#"))); + QueryResult r2 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-2#"))); + QueryResult r3 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-3#"))); + + QueryResultIterator results = new TestQueryResultIterator(Arrays.asList(r1, r2, r3)); + + Mockito.when(interpreter.getReasoner().answerQuery(Mockito.any(), Mockito.eq(true))).thenReturn(results); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT 2 ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + String output = writer.toString(); + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertTrue(output.contains("#TEST-1#")); + assertTrue(output.contains("#TEST-2#")); + assertFalse(output.contains("#TEST-3#")); + assertTrue(output.contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseBooleanQueryTrue_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter 
interpreter = InterpreterTest.getMockInterpreter(writer); + + QueryResult r1 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("TEST-1"))); + QueryResult r2 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-2#"))); + QueryResult r3 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-3#"))); + + QueryResultIterator results = new TestQueryResultIterator(Arrays.asList(r1, r2, r3)); + + Mockito.when(interpreter.getReasoner().answerQuery(Mockito.any(), Mockito.eq(true))).thenReturn(results); + + Command command = interpreter.parseCommand("@query p(TEST-1) LIMIT 2 ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + String output = writer.toString(); + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertFalse(output.contains("TEST-1")); + assertFalse(output.contains("#TEST-2#")); + assertFalse(output.contains("#TEST-3#")); + assertTrue(output.startsWith("true")); + assertTrue(output.contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseBooleanQueryFalse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + QueryResultIterator results = new TestQueryResultIterator(Arrays.asList()); + + Mockito.when(interpreter.getReasoner().answerQuery(Mockito.any(), Mockito.eq(true))).thenReturn(results); + + Command command = interpreter.parseCommand("@query p(TEST-1) LIMIT 2 ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + String output = writer.toString(); + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertTrue(output.startsWith("false")); + assertTrue(output.contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseCount_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + QueryAnswerCount queryAnswerCount = new QueryAnswerCountImpl(Correctness.SOUND_AND_COMPLETE, 42); + Mockito.when(interpreter.getReasoner().countQueryAnswers(Mockito.any(), Mockito.eq(true))) + .thenReturn(queryAnswerCount); + Mockito.when(interpreter.getReasoner().countQueryAnswers(Mockito.any())).thenReturn(queryAnswerCount); + + Command command = interpreter.parseCommand("@query COUNT p(?X) ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + + assertEquals("query", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(writer.toString().startsWith("42\n")); + assertTrue(writer.toString().contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseExport_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Mockito.when(interpreter.getReasoner().exportQueryAnswersToCsv(Mockito.any(), 
Mockito.eq("file.csv"), + Mockito.anyBoolean())).thenReturn(Correctness.SOUND_BUT_INCOMPLETE); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV \"file.csv\" ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertTrue(writer.toString().contains(Correctness.SOUND_BUT_INCOMPLETE.toString())); + } + + @Test(expected = CommandExecutionException.class) + public void exportIoError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Mockito.when(interpreter.getReasoner().exportQueryAnswersToCsv(Mockito.any(), Mockito.eq("file.csv"), + Mockito.anyBoolean())).thenThrow(IOException.class); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV \"file.csv\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountZero_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentNoLiteral_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query COUNT LIMIT 10 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountWithLimit_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query COUNT p(?X) LIMIT 10 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountWithExportFile_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query COUNT p(?X) EXPORTCSV \"file.csv\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongLimitTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT \"10\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongLimitNoTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT p(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void 
wrongArgumentMissingLimit_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongExportFileTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV 123 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongExportFileNoTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV p(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentMissingExportFile_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentExportWithLimit_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT 10 EXPORTCSV \"test.csv\" ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new QueryCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new QueryCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java new file mode 100644 index 000000000..025540d02 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java @@ -0,0 +1,85 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ReasonCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Mockito.when(interpreter.getReasoner().getCorrectness()).thenReturn(Correctness.SOUND_BUT_INCOMPLETE); + Mockito.when(interpreter.getReasoner().reason()).thenAnswer(I -> { + Mockito.when(interpreter.getReasoner().getCorrectness()).thenReturn(Correctness.SOUND_AND_COMPLETE); + return true; + }); + + Command command = interpreter.parseCommand("@reason ."); + interpreter.runCommand(command); + + assertEquals(Correctness.SOUND_AND_COMPLETE, interpreter.getReasoner().getCorrectness()); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseReasonerException_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Mockito.when(interpreter.getReasoner().reason()).thenThrow(IOException.class); + + Command command = interpreter.parseCommand("@reason ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@reason p(?X) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new ReasonCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new ReasonCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java new file mode 100644 index 000000000..c5532acf9 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java @@ -0,0 +1,176 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class RemoveSourceCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, MalformedURLException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + DataSource dataSource = new SparqlQueryResultDataSource(new URL("http://example.org"), "?x", "?x
    "); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration); + + Command command = interpreter + .parseCommand("@delsource p[1] : sparql(, \"?x\", \"?x
    \") ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals("delsource", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromPositiveLiteral().isPresent()); + + assertTrue(facts.isEmpty()); + assertTrue(rules.isEmpty()); + assertTrue(dataSourceDeclarations.isEmpty()); + } + + @Test + public void correctUseNothingRemoved_succeeds() + throws ParsingException, CommandExecutionException, MalformedURLException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + DataSource dataSource = new SparqlQueryResultDataSource(new URL("http://example.org"), "?x", "?x
    "); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration); + + Command command = interpreter + .parseCommand("@delsource another[1] : sparql(, \"?x\", \"?x
    \") ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertTrue(facts.isEmpty()); + assertTrue(rules.isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration), dataSourceDeclarations); + } + + @Test + public void correctUseRemoveAll_succeeds() + throws ParsingException, CommandExecutionException, MalformedURLException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + Predicate predicate2 = Expressions.makePredicate("q", 1); + DataSource dataSource = new SparqlQueryResultDataSource(new URL("http://example.org"), "?x", "?x
    "); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource); + interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration); + interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration2); + + Command command = interpreter.parseCommand("@delsource p[1] ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertTrue(facts.isEmpty()); + assertTrue(rules.isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration2), dataSourceDeclarations); + } + + @Test(expected = CommandExecutionException.class) + public void wrongFirstArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@dellsource \"string\" p(a)."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@delsource p[1]: \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentUnknownSource_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@delsource p[1]: unknown(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@delsource p[2]: p(a) p(b) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountZero_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@delsource ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new RemoveSourceCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new RemoveSourceCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java 
b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java new file mode 100644 index 000000000..3381ac9ec --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java @@ -0,0 +1,108 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class RetractCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + Fact fact2 = Expressions.makeFact(q, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); + interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(fact2); + interpreter.getKnowledgeBase().addStatement(rule); + + Command command = interpreter.parseCommand("@retract p(a) q(?X) :- r(?X) ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals("retract", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromPositiveLiteral().isPresent()); + assertTrue(command.getArguments().get(1).fromRule().isPresent()); + + assertEquals(Arrays.asList(fact2), facts); + assertTrue(rules.isEmpty()); + assertTrue(dataSourceDeclarations.isEmpty()); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = 
new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentNonFact_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract p(?X) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java new file mode 100644 index 000000000..5ddd3b231 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java @@ -0,0 +1,97 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ShowKbCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + Command command = interpreter.parseCommand("@showkb ."); + interpreter.runCommand(command); + + StringWriter anotherWriter = new StringWriter(); + interpreter.getKnowledgeBase().writeKnowledgeBase(anotherWriter); + + assertEquals("showkb", command.getName()); + assertEquals(0, command.getArguments().size()); + assertEquals(writer.toString(), anotherWriter.toString()); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@showkb p(?X) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void ioError_fails() throws ParsingException, CommandExecutionException, IOException { + Writer writer = Mockito.mock(Writer.class); + Mockito.doThrow(IOException.class).when(writer).write(Mockito.anyString()); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + Command command = interpreter.parseCommand("@showkb ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new ShowKbCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new ShowKbCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} From 0e367daf33465e8aa2aa1dd4b8a20409f1de1db2 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 21:37:27 +0200 Subject: [PATCH 0721/1003] yet more tests --- .../rulewerk/commands/Interpreter.java | 19 ++++ .../commands/LoadCommandInterpreter.java | 8 +- .../rulewerk/commands/InterpreterTest.java | 36 ++++++++ .../SetPrefixCommandInterpreterTest.java | 91 +++++++++++++++++++ 4 files changed, 150 insertions(+), 4 deletions(-) create mode 100644 
rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index d6dc0fecd..d75e8e235 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -1,6 +1,7 @@ package org.semanticweb.rulewerk.commands; import java.io.ByteArrayInputStream; +import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.InputStream; @@ -208,8 +209,26 @@ public static PositiveLiteral extractPositiveLiteralArgument(final Command comma .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); } + /** + * Returns a Writer to write to the specified file. + * + * @param fileName + * @return + * @throws FileNotFoundException + */ public Writer getFileWriter(String fileName) throws FileNotFoundException { return new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8); } + /** + * Returns an InputStream to read from the specified file. + * + * @param fileName + * @return + * @throws FileNotFoundException + */ + public InputStream getFileInputStream(String fileName) throws FileNotFoundException { + return new FileInputStream(fileName); + } + } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index feeea25c2..b8b74b0b8 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -20,8 +20,8 @@ * #L% */ -import java.io.FileInputStream; import java.io.FileNotFoundException; +import java.io.InputStream; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.parser.ParsingException; @@ -35,12 +35,12 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio String fileName = Interpreter.extractStringArgument(command, 0, "filename"); try { - FileInputStream fileInputStream = new FileInputStream(fileName); - RuleParser.parseInto(interpreter.getKnowledgeBase(), fileInputStream); + InputStream inputStream = interpreter.getFileInputStream(fileName); + RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream); } catch (FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); } catch (ParsingException e) { - interpreter.printNormal("Error parsing file: " + e.getMessage() + "\n"); + throw new CommandExecutionException("Error parsing file: " + e.getMessage(), e); } } diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java index ffe2c5380..74e517e33 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java @@ -27,6 +27,7 @@ import org.junit.Test; import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import 
org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -100,4 +101,39 @@ public void malformedCommand_fails() throws ParsingException { interpreter.parseCommand("malformed ."); } + @Test + public void prefixesAreUsed_succeeds() throws ParsingException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + + Command command = interpreter.parseCommand("@somecommand eg:test ."); + + assertEquals(1, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertEquals("http://example.org/test", command.getArguments().get(0).fromTerm().get().getName()); + } + + @Test + public void print_succeeds() { + StringWriter writer = new StringWriter(); + SimpleStyledPrinter printer = Mockito.spy(new SimpleStyledPrinter(writer)); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + Reasoner reasoner = Mockito.mock(Reasoner.class); + Interpreter interpreter = new Interpreter(reasoner, printer, parserConfiguration); + + interpreter.printCode("Code"); + interpreter.printNormal("Normal"); + interpreter.printEmph("Emph"); + interpreter.printSection("Section"); + interpreter.printImportant("Important"); + + Mockito.verify(printer).printCode("Code"); + Mockito.verify(printer).printNormal("Normal"); + Mockito.verify(printer).printEmph("Emph"); + Mockito.verify(printer).printSection("Section"); + Mockito.verify(printer).printImportant("Important"); + + } + } diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java new file mode 100644 index 000000000..9190c8407 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java @@ -0,0 +1,91 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class SetPrefixCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@setprefix eg: ."); + interpreter.runCommand(command); + + assertEquals("setprefix", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); + + assertEquals("http://example.org/", interpreter.getKnowledgeBase().getPrefixIri("eg:")); + } + + @Test(expected = CommandExecutionException.class) + public void wrongFirstArgument_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@setprefix 123 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgument_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@setprefix pre: 123 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@setprefix ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new SetPrefixCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new SetPrefixCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} From 308009586877611b4d9d6fa9ede3181a83b26c94 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 21:42:36 +0200 Subject: [PATCH 0722/1003] include commands in coverage --- coverage/pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/coverage/pom.xml b/coverage/pom.xml index b65b563fa..76ca8882d 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -44,6 +44,11 @@ rulewerk-parser ${project.version} + + ${project.groupId} + rulewerk-commands + ${project.version} + ${project.groupId} rulewerk-client From dcad863218eb750c3de89e0f98f15dd1670ca2fa Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 22:08:28 +0200 Subject: [PATCH 0723/1003] update documentation files --- README.md | 5 +++-- RELEASE-NOTES.md | 10 ++++++++++ 2 files changed, 13 insertions(+), 2 
deletions(-) diff --git a/README.md b/README.md index 47b38c0f1..5d183be1f 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ The current release of Rulewerk is version 0.6.0. The easiest way of using the l Previous to version `0.6.0`, *rulewerk* project name was *vlog4j*. Older versions released under name *vlog4j* have `org.semanticweb.vlog4j` and `vlog4j-core`, the latest version being version `0.5.0`. -You need to use Java 1.8 or above. Available modules include: +You need to use Java 1.8 or above. Available source modules include: * **rulewerk-core**: essential data models for rules and facts, and essential reasoner functionality * **rulewerk-parser**: support for processing knowledge bases in [Rulewerk syntax](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar) @@ -30,9 +30,10 @@ You need to use Java 1.8 or above. Available modules include: * **rulewerk-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **rulewerk-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API * **rulewerk-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/rulewerk/wiki/Standalone-client) for Rulewerk. +* **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released packages use vlog-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use vlog-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog-base. * Run ```mvn install``` to test if the setup works diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 79f9edb8a..71a54cce9 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -5,11 +5,21 @@ Rulewerk v0.7.0 --------------- New features: +* New interactive Rulewerk shell for rule reasoning from the command line client * Significant speedup in iterating over query results +* New class `LiteralQueryResultPrinter` for pretty-printing query results Other improvements: +* Improved serialization of knowledge bases (using namespaces) +* Simple (non-IRI, namespace-less) predicate names can now include - and _ * InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where two or more edges are the same. + +Breaking changes: +* The `Serializer` class in the core package has been replaced by a new implementation + with a completely different interface. 
+* The methods `getSerialization` that were present in most syntax objects have been removed. Use `toString()` instead for simple serializations, or invoke a custom Serializer. +* The `DataSource` interface requires a new method to be implemented. Rulewerk v0.6.0 --------------- From 0166159718db10f9a5605ca61cefe56b8cb762c0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 22:10:31 +0200 Subject: [PATCH 0724/1003] remove useless methods the command parsing method of Interpreter is usually preferable --- .../org/semanticweb/rulewerk/parser/RuleParser.java | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index 146fa1085..c9a00c103 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -26,7 +26,6 @@ import java.util.List; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; -import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Entity; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -254,15 +253,6 @@ public static DataSourceDeclaration parseDataSourceDeclaration(final String inpu return parseDataSourceDeclaration(input, null); } - public static Command parseCommand(final String input, ParserConfiguration parserConfiguration) - throws ParsingException { - return parseSyntaxFragment(input, JavaCCParser::command, "command", parserConfiguration); - } - - public static Command parseCommand(final String input) throws ParsingException { - return parseCommand(input, null); - } - static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException { try { parser.parse(); From 73acbe6e227ed1876b6a7b315a45ec73a1caf630 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 22:22:21 +0200 Subject: [PATCH 0725/1003] display KB when starting --- .../examples/CompareWikidataDBpedia.java | 30 ++++++++++--------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java index d3249c93d..18a01975f 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java @@ -70,28 +70,30 @@ public static void main(final String[] args) throws ParsingException, IOExceptio // Configure the SPARQL data sources and some rules to analyse results: final String rules = "" // - + "@prefix wdqs: ." // - + "@prefix dbp: ." // - + "@source dbpResult[2] : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // - + "@source wdResult[2] : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + + "@prefix wdqs: .\n" // + + "@prefix dbp: .\n" // + + "@source dbpResult[2] : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') .\n" // + + "@source wdResult[2] : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') .\n" // + "% Rules:\n" // - + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage)." // - + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage)." 
// - + "result(?Wikipage) :- inWd(?Wikipage)." // - + "result(?Wikipage) :- inDbp(?Wikipage)." // - + "match(?WdId,?DbpId) :- dbpResult(?DbpId,?Wikipage), wdResult(?WdId,?Wikipage)." - + "dbpOnly(?Wikipage) :- inDbp(?Wikipage), ~inWd(?Wikipage)." - + "wdpOnly(?WdId,?Wikipage) :- wdResult(?WdId,?Wikipage), ~inDbp(?Wikipage)." + ""; // + + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage).\n" // + + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage).\n" // + + "result(?Wikipage) :- inWd(?Wikipage).\n" // + + "result(?Wikipage) :- inDbp(?Wikipage).\n" // + + "match(?WdId,?DbpId) :- dbpResult(?DbpId,?Wikipage), wdResult(?WdId,?Wikipage).\n" + + "dbpOnly(?Wikipage) :- inDbp(?Wikipage), ~inWd(?Wikipage).\n" + + "wdpOnly(?WdId,?Wikipage) :- wdResult(?WdId,?Wikipage), ~inDbp(?Wikipage).\n"; // + + System.out.println("Knowledge base used in this example:\n\n" + rules); final KnowledgeBase kb = RuleParser.parse(rules); try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double resultCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("result(?X)")) + final long resultCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("result(?X)")) .getCount(); - final double wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getCount(); - final double dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getCount(); + final long wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getCount(); + final long dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getCount(); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); From 972d0894caa013ded123aee5a781bf8e2f7f9036 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 22:22:43 +0200 Subject: [PATCH 0726/1003] use new result printing facilities --- .../rulewerk/examples/ExamplesUtils.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index 87745d975..8d18a35f3 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -21,6 +21,7 @@ */ import java.io.IOException; +import java.io.OutputStreamWriter; import java.util.ArrayList; import java.util.List; @@ -34,6 +35,7 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.LiteralQueryResultPrinter; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; @@ -88,10 +90,17 @@ public static void configureLogging() { */ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) { System.out.println("Answers to query " + queryAtom + " :"); + OutputStreamWriter writer = new OutputStreamWriter(System.out); + LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(queryAtom, writer, + reasoner.getKnowledgeBase().getPrefixDeclarationRegistry()); try (final 
QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { - answers.forEachRemaining(answer -> System.out.println(" - " + answer)); - + while (answers.hasNext()) { + printer.write(answers.next()); + writer.flush(); + } System.out.println("Query answers are: " + answers.getCorrectness()); + } catch (IOException e) { + throw new RuntimeException(e); } System.out.println(); } From 8891a295e3fda7eb46a26d9d9e7047eef4db94c2 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 22:42:54 +0200 Subject: [PATCH 0727/1003] comment out unused code --- .../rulewerk/core/reasoner/Timer.java | 630 +++++++++--------- 1 file changed, 315 insertions(+), 315 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java index 9b555ede2..1f41efa6e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java @@ -22,22 +22,22 @@ import java.lang.management.ManagementFactory; import java.lang.management.ThreadMXBean; -import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * Class for keeping CPU and system times. The class has a number of features * that can be used to measure and aggregate times across many threads and many * methods. * + * @implNote This file originates from the ELK Reasoner, where more extensive thread-aware + * timing was required. The file contains commented out functions from that source that + * could be used to activate those features here. + * * @author Markus Kroetzsch */ public class Timer { - private static Logger LOGGER = LoggerFactory.getLogger(Timer.class); + //private static Logger LOGGER = LoggerFactory.getLogger(Timer.class); /** Flag for indicating that no times should be taken (just count runs). */ public static final int RECORD_NONE = 0x00000000; @@ -223,316 +223,316 @@ public synchronized long stop() { return totalTime; } - /** - * Print logging information for the timer. The log only shows the recorded time - * of the completed start-stop cycles. If the timer is still running, then it - * will not be stopped to add the currently measured time to the output but a - * warning will be logged. 
- * - */ - public void log() { - if (LOGGER.isInfoEnabled()) { - String timerLabel; - if (threadId != 0) { - timerLabel = name + " (thread " + threadId + ")"; - } else if (threadCount > 1) { - timerLabel = name + " (over " + threadCount + " threads)"; - } else { - timerLabel = name; - } - - if (todoFlags == RECORD_NONE) { - LOGGER.info("Timer " + timerLabel + " recorded " + measurements + " run(s), no times taken"); - } else { - String labels = ""; - String values = ""; - String separator; - - if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { - labels += "CPU"; - values += totalCpuTime / 1000000; - separator = "/"; - } else { - separator = ""; - } - if ((todoFlags & RECORD_WALLTIME) != 0) { - labels += separator + "Wall"; - values += separator + totalWallTime / 1000000; - } - if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { - labels += "/CPU avg"; - values += "/" + (float) (totalCpuTime) / measurements / 1000000; - } - if ((todoFlags & RECORD_WALLTIME) != 0) { - labels += "/Wall avg"; - values += "/" + (float) (totalWallTime) / measurements / 1000000; - } - if (threadCount > 1) { - if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { - labels += "/CPU per thread"; - values += "/" + (float) (totalCpuTime) / threadCount / 1000000; - } - if ((todoFlags & RECORD_WALLTIME) != 0) { - labels += "/Wall per thread"; - values += "/" + (float) (totalWallTime) / threadCount / 1000000; - } - } - - LOGGER.info( - "Time for " + timerLabel + " for " + measurements + " run(s) " + labels + " (ms): " + values); - } - - if (isRunning) { - LOGGER.warn("Timer " + timerLabel + " logged while it was still running"); - } - } - } - - /** - * Start a timer of the given string name for all todos and the current thread. - * If no such timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - */ - public static void startNamedTimer(String timerName) { - getNamedTimer(timerName).start(); - } - - /** - * Start a timer of the given string name for the current thread. If no such - * timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - */ - public static void startNamedTimer(String timerName, int todoFlags) { - getNamedTimer(timerName, todoFlags).start(); - } - - /** - * Start a timer of the given string name for the current thread. If no such - * timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - * @param threadId of the thread to track, or 0 if only system clock should be - * tracked - */ - public static void startNamedTimer(String timerName, int todoFlags, long threadId) { - getNamedTimer(timerName, todoFlags, threadId).start(); - } - - /** - * Stop a timer of the given string name for all todos and the current thread. - * If no such timer exists, -1 will be returned. Otherwise the return value is - * the CPU time that was measured. - * - * @param timerName the name of the timer - * @return CPU time if timer existed and was running, and -1 otherwise - */ - public static long stopNamedTimer(String timerName) { - return stopNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); - } - - /** - * Stop a timer of the given string name for the current thread. If no such - * timer exists, -1 will be returned. Otherwise the return value is the CPU time - * that was measured. 
- * - * @param timerName the name of the timer - * @param todoFlags - * @return CPU time if timer existed and was running, and -1 otherwise - */ - public static long stopNamedTimer(String timerName, int todoFlags) { - return stopNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); - } - - /** - * Stop a timer of the given string name for the given thread. If no such timer - * exists, -1 will be returned. Otherwise the return value is the CPU time that - * was measured. - * - * @param timerName the name of the timer - * @param todoFlags - * @param threadId of the thread to track, or 0 if only system clock should be - * tracked - * @return CPU time if timer existed and was running, and -1 otherwise - */ - public static long stopNamedTimer(String timerName, int todoFlags, long threadId) { - Timer key = new Timer(timerName, todoFlags, threadId); - if (registeredTimers.containsKey(key)) { - return registeredTimers.get(key).stop(); - } else { - return -1; - } - } - - /** - * Reset a timer of the given string name for all todos and the current thread. - * If no such timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - */ - public static void resetNamedTimer(String timerName) { - getNamedTimer(timerName).reset(); - } - - /** - * Reset a timer of the given string name for the current thread. If no such - * timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - */ - public static void resetNamedTimer(String timerName, int todoFlags) { - getNamedTimer(timerName, todoFlags).reset(); - } - - /** - * Reset a timer of the given string name for the given thread. If no such timer - * exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - * @param threadId of the thread to track, or 0 if only system clock should be - * tracked - */ - public static void resetNamedTimer(String timerName, int todoFlags, long threadId) { - getNamedTimer(timerName, todoFlags, threadId).reset(); - } - - /** - * Get a timer of the given string name that takes all possible times (todos) - * for the current thread. If no such timer exists yet, then it will be newly - * created. - * - * @param timerName the name of the timer - * @return timer - */ - public static Timer getNamedTimer(String timerName) { - return getNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); - } - - /** - * Returns all registered timers - * - * @return an iterable collection of named timers - */ - public static Iterable getNamedTimers() { - return registeredTimers.keySet(); - } - - /** - * Get a timer of the given string name and todos for the current thread. If no - * such timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - * @return timer - */ - public static Timer getNamedTimer(String timerName, int todoFlags) { - return getNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); - } - - /** - * Get a timer of the given string name for the given thread. If no such timer - * exists yet, then it will be newly created. 
- * - * @param timerName the name of the timer - * @param todoFlags - * @param threadId of the thread to track, or 0 if only system clock should be - * tracked - * @return timer - */ - public static Timer getNamedTimer(String timerName, int todoFlags, long threadId) { - Timer key = new Timer(timerName, todoFlags, threadId); - Timer previous = registeredTimers.putIfAbsent(key, key); - if (previous != null) { - return previous; - } - // else - return key; - } - - /** - * Collect the total times measured by all known named timers of the given name. - * - * @param timerName - * @return timer - */ - public static Timer getNamedTotalTimer(String timerName) { - long totalCpuTime = 0; - long totalSystemTime = 0; - int measurements = 0; - int threadCount = 0; - int todoFlags = RECORD_NONE; - Timer previousTimer = null; - for (Map.Entry entry : registeredTimers.entrySet()) { - if (entry.getValue().name.equals(timerName)) { - previousTimer = entry.getValue(); - threadCount += 1; - totalCpuTime += previousTimer.totalCpuTime; - totalSystemTime += previousTimer.totalWallTime; - measurements += previousTimer.measurements; - todoFlags |= previousTimer.todoFlags; - } - } - - if (threadCount == 1) { - return previousTimer; - } else { - Timer result = new Timer(timerName, todoFlags, 0); - result.totalCpuTime = totalCpuTime; - result.totalWallTime = totalSystemTime; - result.measurements = measurements; - result.threadCount = threadCount; - return result; - } - } - - public static void logAllNamedTimers(String timerName) { - for (Map.Entry entry : registeredTimers.entrySet()) { - if (entry.getValue().name.equals(timerName)) { - entry.getValue().log(); - } - } - } - - @Override - public int hashCode() { - // Jenkins hash, see http://www.burtleburtle.net/bob/hash/doobs.html and also - // http://en.wikipedia.org/wiki/Jenkins_hash_function. - int hash = name.hashCode(); - hash += (hash << 10); - hash ^= (hash >> 6); - hash += Long.valueOf(threadId).hashCode(); - hash += (hash << 10); - hash ^= (hash >> 6); - hash += Integer.valueOf(todoFlags).hashCode(); - hash += (hash << 10); - hash ^= (hash >> 6); - - hash += (hash << 3); - hash ^= (hash >> 11); - hash += (hash << 15); - return hash; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } else if (obj == null) { - return false; - } else if (getClass() != obj.getClass()) { - return false; - } else if (threadId == ((Timer) obj).threadId && todoFlags == ((Timer) obj).todoFlags - && name.equals(((Timer) obj).name)) { - return true; - } else { - return false; - } - } +// /** +// * Print logging information for the timer. The log only shows the recorded time +// * of the completed start-stop cycles. If the timer is still running, then it +// * will not be stopped to add the currently measured time to the output but a +// * warning will be logged. 
+// * +// */ +// public void log() { +// if (LOGGER.isInfoEnabled()) { +// String timerLabel; +// if (threadId != 0) { +// timerLabel = name + " (thread " + threadId + ")"; +// } else if (threadCount > 1) { +// timerLabel = name + " (over " + threadCount + " threads)"; +// } else { +// timerLabel = name; +// } +// +// if (todoFlags == RECORD_NONE) { +// LOGGER.info("Timer " + timerLabel + " recorded " + measurements + " run(s), no times taken"); +// } else { +// String labels = ""; +// String values = ""; +// String separator; +// +// if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { +// labels += "CPU"; +// values += totalCpuTime / 1000000; +// separator = "/"; +// } else { +// separator = ""; +// } +// if ((todoFlags & RECORD_WALLTIME) != 0) { +// labels += separator + "Wall"; +// values += separator + totalWallTime / 1000000; +// } +// if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { +// labels += "/CPU avg"; +// values += "/" + (float) (totalCpuTime) / measurements / 1000000; +// } +// if ((todoFlags & RECORD_WALLTIME) != 0) { +// labels += "/Wall avg"; +// values += "/" + (float) (totalWallTime) / measurements / 1000000; +// } +// if (threadCount > 1) { +// if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { +// labels += "/CPU per thread"; +// values += "/" + (float) (totalCpuTime) / threadCount / 1000000; +// } +// if ((todoFlags & RECORD_WALLTIME) != 0) { +// labels += "/Wall per thread"; +// values += "/" + (float) (totalWallTime) / threadCount / 1000000; +// } +// } +// +// LOGGER.info( +// "Time for " + timerLabel + " for " + measurements + " run(s) " + labels + " (ms): " + values); +// } +// +// if (isRunning) { +// LOGGER.warn("Timer " + timerLabel + " logged while it was still running"); +// } +// } +// } +// +// /** +// * Start a timer of the given string name for all todos and the current thread. +// * If no such timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// */ +// public static void startNamedTimer(String timerName) { +// getNamedTimer(timerName).start(); +// } +// +// /** +// * Start a timer of the given string name for the current thread. If no such +// * timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// */ +// public static void startNamedTimer(String timerName, int todoFlags) { +// getNamedTimer(timerName, todoFlags).start(); +// } +// +// /** +// * Start a timer of the given string name for the current thread. If no such +// * timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// */ +// public static void startNamedTimer(String timerName, int todoFlags, long threadId) { +// getNamedTimer(timerName, todoFlags, threadId).start(); +// } +// +// /** +// * Stop a timer of the given string name for all todos and the current thread. +// * If no such timer exists, -1 will be returned. Otherwise the return value is +// * the CPU time that was measured. +// * +// * @param timerName the name of the timer +// * @return CPU time if timer existed and was running, and -1 otherwise +// */ +// public static long stopNamedTimer(String timerName) { +// return stopNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); +// } +// +// /** +// * Stop a timer of the given string name for the current thread. If no such +// * timer exists, -1 will be returned. 
Otherwise the return value is the CPU time +// * that was measured. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @return CPU time if timer existed and was running, and -1 otherwise +// */ +// public static long stopNamedTimer(String timerName, int todoFlags) { +// return stopNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); +// } +// +// /** +// * Stop a timer of the given string name for the given thread. If no such timer +// * exists, -1 will be returned. Otherwise the return value is the CPU time that +// * was measured. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// * @return CPU time if timer existed and was running, and -1 otherwise +// */ +// public static long stopNamedTimer(String timerName, int todoFlags, long threadId) { +// Timer key = new Timer(timerName, todoFlags, threadId); +// if (registeredTimers.containsKey(key)) { +// return registeredTimers.get(key).stop(); +// } else { +// return -1; +// } +// } +// +// /** +// * Reset a timer of the given string name for all todos and the current thread. +// * If no such timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// */ +// public static void resetNamedTimer(String timerName) { +// getNamedTimer(timerName).reset(); +// } +// +// /** +// * Reset a timer of the given string name for the current thread. If no such +// * timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// */ +// public static void resetNamedTimer(String timerName, int todoFlags) { +// getNamedTimer(timerName, todoFlags).reset(); +// } +// +// /** +// * Reset a timer of the given string name for the given thread. If no such timer +// * exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// */ +// public static void resetNamedTimer(String timerName, int todoFlags, long threadId) { +// getNamedTimer(timerName, todoFlags, threadId).reset(); +// } +// +// /** +// * Get a timer of the given string name that takes all possible times (todos) +// * for the current thread. If no such timer exists yet, then it will be newly +// * created. +// * +// * @param timerName the name of the timer +// * @return timer +// */ +// public static Timer getNamedTimer(String timerName) { +// return getNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); +// } +// +// /** +// * Returns all registered timers +// * +// * @return an iterable collection of named timers +// */ +// public static Iterable getNamedTimers() { +// return registeredTimers.keySet(); +// } +// +// /** +// * Get a timer of the given string name and todos for the current thread. If no +// * such timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @return timer +// */ +// public static Timer getNamedTimer(String timerName, int todoFlags) { +// return getNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); +// } +// +// /** +// * Get a timer of the given string name for the given thread. If no such timer +// * exists yet, then it will be newly created. 
+// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// * @return timer +// */ +// public static Timer getNamedTimer(String timerName, int todoFlags, long threadId) { +// Timer key = new Timer(timerName, todoFlags, threadId); +// Timer previous = registeredTimers.putIfAbsent(key, key); +// if (previous != null) { +// return previous; +// } +// // else +// return key; +// } +// +// /** +// * Collect the total times measured by all known named timers of the given name. +// * +// * @param timerName +// * @return timer +// */ +// public static Timer getNamedTotalTimer(String timerName) { +// long totalCpuTime = 0; +// long totalSystemTime = 0; +// int measurements = 0; +// int threadCount = 0; +// int todoFlags = RECORD_NONE; +// Timer previousTimer = null; +// for (Map.Entry entry : registeredTimers.entrySet()) { +// if (entry.getValue().name.equals(timerName)) { +// previousTimer = entry.getValue(); +// threadCount += 1; +// totalCpuTime += previousTimer.totalCpuTime; +// totalSystemTime += previousTimer.totalWallTime; +// measurements += previousTimer.measurements; +// todoFlags |= previousTimer.todoFlags; +// } +// } +// +// if (threadCount == 1) { +// return previousTimer; +// } else { +// Timer result = new Timer(timerName, todoFlags, 0); +// result.totalCpuTime = totalCpuTime; +// result.totalWallTime = totalSystemTime; +// result.measurements = measurements; +// result.threadCount = threadCount; +// return result; +// } +// } +// +// public static void logAllNamedTimers(String timerName) { +// for (Map.Entry entry : registeredTimers.entrySet()) { +// if (entry.getValue().name.equals(timerName)) { +// entry.getValue().log(); +// } +// } +// } +// +// @Override +// public int hashCode() { +// // Jenkins hash, see http://www.burtleburtle.net/bob/hash/doobs.html and also +// // http://en.wikipedia.org/wiki/Jenkins_hash_function. 
+// int hash = name.hashCode(); +// hash += (hash << 10); +// hash ^= (hash >> 6); +// hash += Long.valueOf(threadId).hashCode(); +// hash += (hash << 10); +// hash ^= (hash >> 6); +// hash += Integer.valueOf(todoFlags).hashCode(); +// hash += (hash << 10); +// hash ^= (hash >> 6); +// +// hash += (hash << 3); +// hash ^= (hash >> 11); +// hash += (hash << 15); +// return hash; +// } +// +// @Override +// public boolean equals(Object obj) { +// if (this == obj) { +// return true; +// } else if (obj == null) { +// return false; +// } else if (getClass() != obj.getClass()) { +// return false; +// } else if (threadId == ((Timer) obj).threadId && todoFlags == ((Timer) obj).todoFlags +// && name.equals(((Timer) obj).name)) { +// return true; +// } else { +// return false; +// } +// } protected static long getThreadCpuTime(long threadId) { if (threadId == 0) { // generally invalid From 5c6985e8159eee3a40977321457163244a342d4f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 24 Aug 2020 09:41:05 +0200 Subject: [PATCH 0728/1003] test load command --- .../commands/LoadCommandInterpreterTest.java | 123 ++++++++++++++++++ 1 file changed, 123 insertions(+) create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java new file mode 100644 index 000000000..5ed5a0f14 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -0,0 +1,123 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.ByteArrayInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.StringWriter; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class LoadCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + InputStream inputStream = new ByteArrayInputStream("p(a) .".getBytes(StandardCharsets.UTF_8)); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + + assertEquals("load", command.getName()); + assertEquals(1, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + InputStream inputStream = new ByteArrayInputStream("not parsable".getBytes(StandardCharsets.UTF_8)); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseFileNotFoundError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public 
void wrongArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load p(a) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new LoadCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new LoadCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} From 6cb4f8d6d4865792a153bcf9a7f9b8f18a272173 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 24 Aug 2020 12:51:41 +0200 Subject: [PATCH 0729/1003] typo --- .../main/java/org/semanticweb/rulewerk/client/shell/Shell.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index a36caba3c..0768042b6 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -64,7 +64,7 @@ public void run(final CommandReader commandReader) { } } } - interpreter.printSection("Existing Rulewerk shell ... bye.\n\n"); + interpreter.printSection("Exiting Rulewerk shell ... bye.\n\n"); } public void exitShell() { From 42d5fdd3d76181a0247c08ee7cce5da54e9969d0 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 24 Aug 2020 18:32:43 +0200 Subject: [PATCH 0730/1003] Handle exit request from user using CTRL+D --- .../semanticweb/rulewerk/client/shell/CommandReader.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index 48a55eeea..18400e686 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.client.shell; +import org.jline.reader.EndOfFileException; + /*- * #%L * Rulewerk Client @@ -61,6 +63,10 @@ public Command readCommand() { } else { return null; // used as empty command } + } catch (final EndOfFileException e) { + // Exit request from user CTRL+D + return ExitCommandInterpreter.EXIT_COMMAND; + } readLine = readLine.trim(); From 55e4b62c263dbd9ab049259791146e7ddb62a36b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 24 Aug 2020 19:10:06 +0200 Subject: [PATCH 0731/1003] complete only command names --- .../rulewerk/client/shell/DefaultConfiguration.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 73477a47b..e573fb54c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ 
b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -28,6 +28,8 @@ import org.jline.reader.Completer; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; +import org.jline.reader.impl.completer.ArgumentCompleter; +import org.jline.reader.impl.completer.NullCompleter; import org.jline.reader.impl.completer.StringsCompleter; import org.jline.terminal.Terminal; import org.jline.terminal.TerminalBuilder; @@ -59,14 +61,17 @@ public static LineReader buildLineReader(final Terminal terminal, final Interpre return lineReader; } + private static Completer buildCompleter(final Interpreter interpreter) { final Set registeredCommandNames = interpreter.getRegisteredCommands(); final List serializedCommandNames = registeredCommandNames.stream() - .map(commandName -> "@" + commandName) - .collect(Collectors.toList()); - return new StringsCompleter(serializedCommandNames); + .map(commandName -> "@" + commandName).collect(Collectors.toList()); + final Completer commandNamesCompleter = new StringsCompleter(serializedCommandNames); + // do not complete command arguments + return new ArgumentCompleter(commandNamesCompleter, NullCompleter.INSTANCE); } + public static Terminal buildTerminal() throws IOException { return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); } From f308c99e98d05932952ea3b3f837794141ae0a2b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 Aug 2020 11:45:59 +0200 Subject: [PATCH 0732/1003] add file name completion for @load and @export --- .../client/shell/DefaultConfiguration.java | 32 +++++++++++-------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index e573fb54c..011a32086 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -21,16 +21,17 @@ */ import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Set; -import java.util.stream.Collectors; +import org.jline.builtins.Completers; +import org.jline.builtins.Completers.FileNameCompleter; +import org.jline.builtins.Completers.TreeCompleter; +import org.jline.builtins.Completers.TreeCompleter.Node; import org.jline.reader.Completer; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; -import org.jline.reader.impl.completer.ArgumentCompleter; -import org.jline.reader.impl.completer.NullCompleter; -import org.jline.reader.impl.completer.StringsCompleter; import org.jline.terminal.Terminal; import org.jline.terminal.TerminalBuilder; import org.jline.utils.AttributedString; @@ -48,8 +49,7 @@ public static PromptProvider buildPromptProvider() { public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) - .appName("Rulewerk Shell") - .completer(buildCompleter(interpreter)) + .appName("Rulewerk Shell").completer(buildCompleter(interpreter)) // .expander(expander()) // .history(buildHistory()) // .highlighter(buildHighlighter()) @@ -61,16 +61,22 @@ public static LineReader buildLineReader(final Terminal terminal, final Interpre return lineReader; } - private static Completer 
buildCompleter(final Interpreter interpreter) { +// @load and @export commands require a file name as argument + final FileNameCompleter fileNameCompleter = new Completers.FileNameCompleter(); + final Set registeredCommandNames = interpreter.getRegisteredCommands(); - final List serializedCommandNames = registeredCommandNames.stream() - .map(commandName -> "@" + commandName).collect(Collectors.toList()); - final Completer commandNamesCompleter = new StringsCompleter(serializedCommandNames); - // do not complete command arguments - return new ArgumentCompleter(commandNamesCompleter, NullCompleter.INSTANCE); - } + final List nodes = new ArrayList<>(); + registeredCommandNames.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { + if (serializedCommandName.equals("@load") || serializedCommandName.equals("@export")) { + nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); + } else { + nodes.add(TreeCompleter.node(serializedCommandName)); + } + }); + return new TreeCompleter(nodes); + } public static Terminal buildTerminal() throws IOException { return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); From 1daf9bb7ffb7b316f42da65d2c9086f3bdabe6c3 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 Aug 2020 13:48:54 +0200 Subject: [PATCH 0733/1003] remove file completer for @export command --- .../semanticweb/rulewerk/client/shell/DefaultConfiguration.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 011a32086..e9cf0438f 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -68,7 +68,7 @@ private static Completer buildCompleter(final Interpreter interpreter) { final Set registeredCommandNames = interpreter.getRegisteredCommands(); final List nodes = new ArrayList<>(); registeredCommandNames.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { - if (serializedCommandName.equals("@load") || serializedCommandName.equals("@export")) { + if (serializedCommandName.equals("@load")) { nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); } else { nodes.add(TreeCompleter.node(serializedCommandName)); From e7b3d7c00d6b577e4d3e7affc7f1c1226588fb86 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:22:15 +0200 Subject: [PATCH 0734/1003] typos --- .../rulewerk/commands/AddSourceCommandInterpreter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index 5fe8936cd..8f8a2e4b7 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -53,7 +53,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " []: .\n" + 
interpreter.printNormal("Usage: @" + commandName + " []: .\n" + " [] : the name of the predicate and its arity\n" + " : a fact specifying a source declaration\n\n" + "Note that every predicate can have multiple sources.\n"); @@ -87,7 +87,7 @@ static DataSource extractDataSource(PositiveLiteral sourceDeclaration, Interpret return interpreter.getParserConfiguration() .parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration); } catch (ParsingException e) { - throw new CommandExecutionException("Could not parse source declartion: " + e.getMessage()); + throw new CommandExecutionException("Could not parse source declaration: " + e.getMessage()); } } From 29214ad90705694a13e51391ef6dcfde47ddbf72 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:31:18 +0200 Subject: [PATCH 0735/1003] support trident data sources --- .../DataSourceConfigurationVisitor.java | 20 +++-- .../implementation/TridentDataSource.java | 90 +++++++++++++++++++ .../parser/DefaultParserConfiguration.java | 3 + .../TridentDataSourceDeclarationHandler.java | 43 +++++++++ .../VLogDataSourceConfigurationVisitor.java | 26 +++--- 5 files changed, 166 insertions(+), 16 deletions(-) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java create mode 100644 rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java index 9ead436c5..91c78b4e1 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java @@ -32,30 +32,38 @@ public interface DataSourceConfigurationVisitor { /** * Configure the reasoner for a {@link CsvFileDataSource}. * - * @param dataSource the data source to configure. - * @throws IOexception when an IO error occurs during configuration. + * @param dataSource the data source to configure + * @throws IOexception when an IO error occurs during configuration */ public void visit(CsvFileDataSource dataSource) throws IOException; /** * Configure the reasoner for a {@link RdfFileDataSource}. * - * @param dataSource the data source to configure. - * @throws IOexception when an IO error occurs during configuration. + * @param dataSource the data source to configure + * @throws IOexception when an IO error occurs during configuration */ public void visit(RdfFileDataSource dataSource) throws IOException; + /** + * Configure the reasoner for a {@link TridentDataSource}. + * + * @param dataSource the data source to configure + * @throws IOexception when an IO error occurs during configuration + */ + public void visit(TridentDataSource dataSource) throws IOException; + /** * Configure the reasoner for a {@link SparqlQueryResultDataSource}. * - * @param dataSource the data source to configure. + * @param dataSource the data source to configure */ public void visit(SparqlQueryResultDataSource dataSource); /** * Configure the reasoner for a {@link InMemoryDataSource}. * - * @param dataSource the data source to configure. 
+ * @param dataSource the data source to configure */ public void visit(InMemoryDataSource dataSource); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java new file mode 100644 index 000000000..14245678a --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -0,0 +1,90 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +/** + * Data source for loading data from a database created with the + * Trident RDF indexing and + * storage utility. This is the recommended data source for large RDF + * datasets in the VLog reasoner. Trident databases are generated from RDF input + * files in a batch process using the Trident tool. + * + * @author Markus Kroetzsch + * + */ +public class TridentDataSource implements ReasonerDataSource { + + /** + * The name of the predicate used for declarations of data sources of this type. 
+ */ + public static final String declarationPredicateName = "trident"; + + final String filePath; + + public TridentDataSource(final String filePath) { + this.filePath = filePath; + } + + public String getPath() { + return this.filePath; + } + + @Override + public Fact getDeclarationFact() { + Predicate predicate = Expressions.makePredicate(declarationPredicateName, 1); + return Expressions.makeFact(predicate, + Expressions.makeDatatypeConstant(filePath, PrefixDeclarationRegistry.XSD_STRING)); + } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) throws IOException { + visitor.visit(this); + + } + + @Override + public int hashCode() { + return this.filePath.hashCode(); + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof TridentDataSource)) { + return false; + } + final TridentDataSource other = (TridentDataSource) obj; + return this.filePath.equals(other.getPath()); + } + +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java index d8ce99ddb..80050e49f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java @@ -3,6 +3,7 @@ import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; /*- * #%L @@ -27,6 +28,7 @@ import org.semanticweb.rulewerk.parser.datasources.CsvFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; +import org.semanticweb.rulewerk.parser.datasources.TridentDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.directives.ImportFileDirectiveHandler; import org.semanticweb.rulewerk.parser.directives.ImportFileRelativeDirectiveHandler; @@ -50,6 +52,7 @@ private void registerDefaultDataSources() { registerDataSource(RdfFileDataSource.declarationPredicateName, new RdfFileDataSourceDeclarationHandler()); registerDataSource(SparqlQueryResultDataSource.declarationPredicateName, new SparqlQueryResultDataSourceDeclarationHandler()); + registerDataSource(TridentDataSource.declarationPredicateName, new TridentDataSourceDeclarationHandler()); } private void registerDefaultDirectives() { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java new file mode 100644 index 000000000..830d97c59 --- /dev/null +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java @@ -0,0 +1,43 @@ +package org.semanticweb.rulewerk.parser.datasources; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * Handler for parsing {@link TridentDataSource} declarations + * + * @author Markus Kroetzsch + */ +public class TridentDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + @Override + public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); + String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "path to Trident database"); + + return new TridentDataSource(fileName); + } +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java index f5396dc24..c2bc52c08 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java @@ -29,6 +29,7 @@ import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; public class VLogDataSourceConfigurationVisitor implements DataSourceConfigurationVisitor { @@ -37,6 +38,7 @@ public class VLogDataSourceConfigurationVisitor implements DataSourceConfigurati private static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; private static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; private final static String FILE_DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY"; + private final static String TRIDENT_DATASOURCE_TYPE_CONFIG_VALUE = "Trident"; private static final String SPARQL_DATASOURCE_TYPE_CONFIG_VALUE = "SPARQL"; public String getConfigString() { @@ -44,11 +46,9 @@ public String getConfigString() { } protected void setFileConfigString(FileDataSource dataSource) throws IOException { - this.configString = - PREDICATE_NAME_CONFIG_LINE + - DATASOURCE_TYPE_CONFIG_PARAM + "=" + FILE_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - "EDB%1$d_param0=" + getDirCanonicalPath(dataSource) + "\n" + - "EDB%1$d_param1=" + getFileNameWithoutExtension(dataSource) + "\n"; + this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" + + FILE_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + "EDB%1$d_param0=" + getDirCanonicalPath(dataSource) + "\n" + + "EDB%1$d_param1=" + getFileNameWithoutExtension(dataSource) + "\n"; } String getDirCanonicalPath(FileDataSource dataSource) throws IOException { @@ -72,11 +72,17 @@ 
public void visit(RdfFileDataSource dataSource) throws IOException { @Override public void visit(SparqlQueryResultDataSource dataSource) { - this.configString = - PREDICATE_NAME_CONFIG_LINE + - DATASOURCE_TYPE_CONFIG_PARAM + "=" + SPARQL_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - "EDB%1$d_param0=" + dataSource.getEndpoint() + "\n" + "EDB%1$d_param1=" + dataSource.getQueryVariables() + "\n" + - "EDB%1$d_param2=" + dataSource.getQueryBody() + "\n"; + this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" + + SPARQL_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + "EDB%1$d_param0=" + dataSource.getEndpoint() + "\n" + + "EDB%1$d_param1=" + dataSource.getQueryVariables() + "\n" + "EDB%1$d_param2=" + + dataSource.getQueryBody() + "\n"; + } + + @Override + public void visit(TridentDataSource dataSource) { + this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" // + + TRIDENT_DATASOURCE_TYPE_CONFIG_VALUE + "\n" // + + "EDB%1$d_param0=" + dataSource.getPath() + "\n"; } @Override From 45c6a14b9aa4b3cd256bc77a1e1232d2ab1853ab Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:45:42 +0200 Subject: [PATCH 0736/1003] test Trident source --- .../implementation/TridentDataSourceTest.java | 81 +++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java new file mode 100644 index 000000000..faf6d640b --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java @@ -0,0 +1,81 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class TridentDataSourceTest { + + @Test(expected = NullPointerException.class) + public void nullFile_fails() throws IOException { + new TridentDataSource(null); + } + + @Test + public void get_succeeds() throws IOException { + final TridentDataSource tridentDataSource = new TridentDataSource("trident/path"); + assertEquals("trident/path", tridentDataSource.getPath()); + } + + @Test + public void getDeclarationFact_succeeds() throws IOException { + final TridentDataSource tridentDataSource = new TridentDataSource("trident/path"); + Fact fact = tridentDataSource.getDeclarationFact(); + assertEquals(TridentDataSource.declarationPredicateName, fact.getPredicate().getName()); + assertEquals(1, fact.getPredicate().getArity()); + assertEquals(Expressions.makeDatatypeConstant("trident/path", PrefixDeclarationRegistry.XSD_STRING), + fact.getArguments().get(0)); + } + + @Test + public void visit_succeeds() throws IOException { + final DataSourceConfigurationVisitor visitor = Mockito.spy(DataSourceConfigurationVisitor.class); + final TridentDataSource tridentDataSource = new TridentDataSource("trident/path"); + + tridentDataSource.accept(visitor); + + Mockito.verify(visitor).visit(tridentDataSource); + } + + @Test + public void hashEquals_succeed() throws IOException { + final TridentDataSource tridentDataSource1 = new TridentDataSource("trident/path"); + final TridentDataSource tridentDataSource2 = new TridentDataSource("trident/path"); + final TridentDataSource tridentDataSource3 = new TridentDataSource("trident/anotherpath"); + + assertEquals(tridentDataSource1, tridentDataSource2); + assertEquals(tridentDataSource1.hashCode(), tridentDataSource2.hashCode()); + assertNotEquals(tridentDataSource1, tridentDataSource3); + assertEquals(tridentDataSource1, tridentDataSource1); + assertFalse(tridentDataSource1.equals(null)); + assertFalse(tridentDataSource1.equals("trident/path")); + } +} From 91d9363513bddef9927b467c4cb551d7c1d1006f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:52:06 +0200 Subject: [PATCH 0737/1003] validate not null --- .../core/reasoner/implementation/TridentDataSource.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java index 14245678a..54cfba4a8 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -22,6 +22,7 @@ import java.io.IOException; +import org.apache.commons.lang3.Validate; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; @@ -47,6 +48,7 @@ public class TridentDataSource implements ReasonerDataSource { final String filePath; public TridentDataSource(final String filePath) { + Validate.notBlank(filePath, "Path to Trident database cannot be blank!"); this.filePath = filePath; } @@ -64,7 +66,6 @@ public Fact 
getDeclarationFact() { @Override public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); - } @Override From 877ba5deee1b214f0ea8c2570846a29cdb420b16 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:52:18 +0200 Subject: [PATCH 0738/1003] test trident source parsing --- .../parser/RuleParserDataSourceTest.java | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index 1a376bc3e..8e0c0abb4 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -31,6 +31,7 @@ import org.junit.Test; import org.mockito.ArgumentMatchers; +import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; @@ -41,6 +42,7 @@ import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; @@ -50,6 +52,7 @@ public class RuleParserDataSourceTest { private static final String EXAMPLE_RDF_FILE_PATH = "src/main/data/input/example.nt.gz"; private static final String EXAMPLE_CSV_FILE_PATH = "src/main/data/input/example.csv"; private static final String WIKIDATA_SPARQL_ENDPOINT_URI = "https://query.wikidata.org/sparql"; + private static final String EXAMPLE_TRIDENT_PATH = "src/main/data/trident"; @Test public void testCsvSource() throws ParsingException, IOException { @@ -193,4 +196,25 @@ public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws Pa RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); } + @Test + public void testTridentSource_succeeds() throws ParsingException, IOException { + String input = "@source p[2] : trident(\"" + EXAMPLE_TRIDENT_PATH + "\") ."; + DataSource parsed = RuleParser.parseDataSourceDeclaration(input).getDataSource(); + TridentDataSource expected = new TridentDataSource(EXAMPLE_TRIDENT_PATH); + + assertEquals(expected, parsed); + } + + @Test(expected = ParsingException.class) + public void testTridentSourcewrongParameterCount_fails() throws ParsingException, IOException { + String input = "@source p[2] : trident(\"" + EXAMPLE_TRIDENT_PATH + "\", 42) ."; + RuleParser.parseDataSourceDeclaration(input).getDataSource(); + } + + @Test(expected = ParsingException.class) + public void testTridentSourcewrongParameterType_fails() throws ParsingException, IOException { + String input = "@source p[2] : trident(42) ."; + RuleParser.parseDataSourceDeclaration(input).getDataSource(); + } + } From 4218319d6e026ba29b99342eb11823559cb4a94e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:54:52 +0200 Subject: [PATCH 0739/1003] mention trident support --- RELEASE-NOTES.md | 2 ++ 1 file 
changed, 2 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 71a54cce9..ec4e55973 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -7,6 +7,8 @@ Rulewerk v0.7.0 New features: * New interactive Rulewerk shell for rule reasoning from the command line client * Significant speedup in iterating over query results +* Support for using data from a Trident database, the recommended data source for large + RDF graphs in VLog * New class `LiteralQueryResultPrinter` for pretty-printing query results Other improvements: From 9e352ee772b7c9d6d192ace273f780a50106871e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 16:19:52 +0200 Subject: [PATCH 0740/1003] Tests for trident source The tests are currently disabled due to VLog bugs but can be enabled when fixed --- .../input/tridentTernaryFacts/_sample/kbstats | Bin 0 -> 140 bytes .../input/tridentTernaryFacts/_sample/p0/0 | Bin 0 -> 2 bytes .../tridentTernaryFacts/_sample/p0/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/p1/0 | Bin 0 -> 2 bytes .../tridentTernaryFacts/_sample/p1/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/p2/0 | 1 + .../tridentTernaryFacts/_sample/p2/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/p3/0 | Bin 0 -> 2 bytes .../tridentTernaryFacts/_sample/p3/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/p4/0 | Bin 0 -> 2 bytes .../tridentTernaryFacts/_sample/p4/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/p5/0 | 1 + .../tridentTernaryFacts/_sample/p5/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/tree/0 | Bin 0 -> 12000 bytes .../tridentTernaryFacts/_sample/tree/idx | Bin 0 -> 34 bytes .../tridentTernaryFacts/_sample/tree/tree | Bin 0 -> 16 bytes .../data/input/tridentTernaryFacts/dict/0/0 | Bin 0 -> 23 bytes .../data/input/tridentTernaryFacts/dict/0/idx | Bin 0 -> 34 bytes .../data/input/tridentTernaryFacts/dict/0/sb | Bin 0 -> 59 bytes .../input/tridentTernaryFacts/dict/0/sb.idx | Bin 0 -> 16 bytes .../input/tridentTernaryFacts/dict/0/tree | Bin 0 -> 16 bytes .../input/tridentTernaryFacts/invdict/0/0 | Bin 0 -> 18 bytes .../input/tridentTernaryFacts/invdict/0/idx | Bin 0 -> 34 bytes .../input/tridentTernaryFacts/invdict/0/tree | Bin 0 -> 16 bytes .../data/input/tridentTernaryFacts/kbstats | Bin 0 -> 140 bytes .../test/data/input/tridentTernaryFacts/p0/0 | Bin 0 -> 4 bytes .../data/input/tridentTernaryFacts/p0/0.idx | Bin 0 -> 19 bytes .../test/data/input/tridentTernaryFacts/p1/0 | Bin 0 -> 4 bytes .../data/input/tridentTernaryFacts/p1/0.idx | Bin 0 -> 30 bytes .../test/data/input/tridentTernaryFacts/p2/0 | 1 + .../data/input/tridentTernaryFacts/p2/0.idx | Bin 0 -> 30 bytes .../test/data/input/tridentTernaryFacts/p3/0 | Bin 0 -> 4 bytes .../data/input/tridentTernaryFacts/p3/0.idx | Bin 0 -> 19 bytes .../test/data/input/tridentTernaryFacts/p4/0 | Bin 0 -> 4 bytes .../data/input/tridentTernaryFacts/p4/0.idx | Bin 0 -> 30 bytes .../test/data/input/tridentTernaryFacts/p5/0 | 1 + .../data/input/tridentTernaryFacts/p5/0.idx | Bin 0 -> 30 bytes .../data/input/tridentTernaryFacts/tree/0 | Bin 0 -> 12000 bytes .../data/input/tridentTernaryFacts/tree/idx | Bin 0 -> 34 bytes .../data/input/tridentTernaryFacts/tree/tree | Bin 0 -> 16 bytes .../vlog/VLogReasonerTridentInput.java | 98 ++++++++++++++++++ 41 files changed, 102 insertions(+) create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/kbstats create mode 100644 
rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/tree create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/tree create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/tree create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/kbstats create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/tree create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/kbstats b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/kbstats new file 
mode 100644 index 0000000000000000000000000000000000000000..4fecafaf21427268bc0b14646a99f99536b7edb8 GIT binary patch literal 140 YcmZQz00Tw{1t*dDY;aKqm@sW<00vY56#xJL literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0 new file mode 100644 index 0000000000000000000000000000000000000000..d6db588e88905ed0aaaf65a947716182301341c9 GIT binary patch literal 2 JcmZQz0RR9700jU5 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..e80ed9ccb03bbd79472f326ba30480cc96bf9f6c GIT binary patch literal 19 QcmZQz00Tw{1tggp005-`g#Z8m literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0 new file mode 100644 index 0000000000000000000000000000000000000000..a903574af00b573ad9bdb2bccf8d93ed00c675de GIT binary patch literal 2 JcmZQz1^@sB00aO4 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..ed5f2a0b2a5a51c51de25b0024a87ed0a5c9d3bb GIT binary patch literal 19 QcmZQz00Tw{1teJ-005@|h5!Hn literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 new file mode 100644 index 000000000..6bebb85a7 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..587e420f8241bce503556ed9ef553e2cb808626b GIT binary patch literal 19 PcmZQz00Tw{1tJ>&0G|MY literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0 new file mode 100644 index 0000000000000000000000000000000000000000..d825e1ad776558a390c09389f5b2ce26cd573be3 GIT binary patch literal 2 JcmZQ!0000A00jU5 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..e80ed9ccb03bbd79472f326ba30480cc96bf9f6c GIT binary patch literal 19 QcmZQz00Tw{1tggp005-`g#Z8m literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0 new file mode 100644 index 0000000000000000000000000000000000000000..15294a501aa6e73201b85ff460b2fcf0adb11e48 GIT binary patch literal 2 JcmZQ(0000800aO4 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..ed5f2a0b2a5a51c51de25b0024a87ed0a5c9d3bb GIT binary patch literal 19 QcmZQz00Tw{1teJ-005@|h5!Hn literal 0 HcmV?d00001 diff --git 
a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 new file mode 100644 index 000000000..938838043 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..587e420f8241bce503556ed9ef553e2cb808626b GIT binary patch literal 19 PcmZQz00Tw{1tJ>&0G|MY literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/0 new file mode 100644 index 0000000000000000000000000000000000000000..15b24cdc32e3343b105e3df38b18498060b9a638 GIT binary patch literal 12000 zcmeIuF$%yS5Cp&#Z3HVxnKBQ})04)|(sB-#gJr8pQnRkMr_Z#ETz_?!eF6js z5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNA}O5g*5 Chy#!S literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/idx new file mode 100644 index 0000000000000000000000000000000000000000..6fce09e5e30ac49abd9a3dbcd05ac96d0b976820 GIT binary patch literal 34 XcmZQzU|?imVBiE17{G*qLGJ+o1C9Ys literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/tree b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/tree new file mode 100644 index 0000000000000000000000000000000000000000..c9426b2a5e5c2105de52e6196409e5222396a68b GIT binary patch literal 16 LcmZQzKmm*Z01*HJ literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/0 new file mode 100644 index 0000000000000000000000000000000000000000..52c0cfce6fc7cd1c7f89c808e71a0ef13120e463 GIT binary patch literal 23 ZcmZQzU|?l{038NZMmY%v7G@?!1^@-d0GI#( literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/idx new file mode 100644 index 0000000000000000000000000000000000000000..1e2cec818bf1b680b251552cbab62ce7de0d0b9a GIT binary patch literal 34 WcmZQzU|?imVBiE17(g5-A`Son*Z?{J literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb new file mode 100644 index 0000000000000000000000000000000000000000..4c0edb8f3ac850c4e6b582d7a193cf6ca1b2a01b GIT binary patch literal 59 zcmew)D#l=wQBqQ1rLUh_k(gVMld6|rl&)WBC&tJmQeY+VL_dm82Gz NC={0zW#*+T0RRNA5WoNc literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb.idx new file mode 100644 index 0000000000000000000000000000000000000000..8e4251486dfcdcc7343027358ae26547c08f355e GIT binary patch literal 16 NcmcC!fBdx&SZ$ literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/tree b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/tree new file mode 100644 index 0000000000000000000000000000000000000000..c9426b2a5e5c2105de52e6196409e5222396a68b GIT binary patch literal 16 LcmZQzKmm*Z01*HJ literal 0 HcmV?d00001 diff --git 
a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/kbstats b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/kbstats new file mode 100644 index 0000000000000000000000000000000000000000..1ee9a05369656705c413eff78239bc5bf4f6d286 GIT binary patch literal 140 gcmZQz00Tw{#lQ-snUMHwP&SZ)DT7H9qDfN^01YPqDgXcg literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0 new file mode 100644 index 0000000000000000000000000000000000000000..36efe2295040d70ebc3425fde69b4da7fa0d2642 GIT binary patch literal 4 LcmZQzVPpaT02BZS literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..e80ed9ccb03bbd79472f326ba30480cc96bf9f6c GIT binary patch literal 19 QcmZQz00Tw{1tggp005-`g#Z8m literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0 new file mode 100644 index 0000000000000000000000000000000000000000..9c47accf380470c3e2d170febfbe9c5135850c2c GIT binary patch literal 4 LcmZQ%W?%*Y02BZS literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..28fe596360999e93dfc88b2edc2c7c6bd13961c2 GIT binary patch literal 30 WcmZQz00Sln1tggoKuj>7r2zm2`~eF9 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 new file mode 100644 index 000000000..fe6a93a9d --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..b5b5e052cb82ef1d19d680d81225082613236ff5 GIT binary patch literal 30 VcmZQz00Sln1tJ?j1enj*000Ku0R{j7 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0 new file mode 100644 index 0000000000000000000000000000000000000000..51ab423f013f2a85c59b8a0fa60aface206a921e GIT binary patch literal 4 LcmZQ#WMKdR02crV literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..e80ed9ccb03bbd79472f326ba30480cc96bf9f6c GIT binary patch literal 19 QcmZQz00Tw{1tggp005-`g#Z8m literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0 new file mode 100644 index 0000000000000000000000000000000000000000..01cbafe4da82ee422876b6f2ea19426637da1da3 GIT binary patch literal 4 LcmZQ(WM%*W02u%X literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..28fe596360999e93dfc88b2edc2c7c6bd13961c2 GIT binary patch literal 30 WcmZQz00Sln1tggoKuj>7r2zm2`~eF9 literal 0 HcmV?d00001 diff --git 
a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 new file mode 100644 index 000000000..7926fc053 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..b5b5e052cb82ef1d19d680d81225082613236ff5 GIT binary patch literal 30 VcmZQz00Sln1tJ?j1enj*000Ku0R{j7 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/0 new file mode 100644 index 0000000000000000000000000000000000000000..0635f5470e1e88ec8490c8cfc47157468df04c9d GIT binary patch literal 12000 zcmeIuF$#b%5Cp)DXdzgMrDaMV>C;V!e!zMTmgTOvVv;O*r_{Qyd**C8a;CQPRg!z8 zwC)-E)VgQf%KiP&1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBly RK!5-N0t5&UAVA=czynW21{?qY literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/idx new file mode 100644 index 0000000000000000000000000000000000000000..f8cf365b4f6a453048cc7fb3372d6815fe549f22 GIT binary patch literal 34 XcmZQzU|?imVBiE17{G&pLGJ+o1M&f4 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/tree b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/tree new file mode 100644 index 0000000000000000000000000000000000000000..c9426b2a5e5c2105de52e6196409e5222396a68b GIT binary patch literal 16 LcmZQzKmm*Z01*HJ literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java new file mode 100644 index 000000000..75e433e1a --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java @@ -0,0 +1,98 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Set; + +import org.junit.Ignore; +import org.junit.Test; +import org.mockito.internal.util.collections.Sets; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; + +public class VLogReasonerTridentInput { + + private static final Predicate ternaryPredicate = Expressions.makePredicate("triple", 3); + private static final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(ternaryPredicate, + Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), + Expressions.makeUniversalVariable("o")); + + @SuppressWarnings("unchecked") + private static final Set> expectedTernaryQueryResult = Sets.newSet( + Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/p"), + Expressions.makeAbstractConstant("http://example.org/c2")), + Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/q"), + Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); + + @Ignore + // Test fails, see https://github.com/karmaresearch/vlog/issues/55 + @Test + public void testLoadTernaryFactsFromSingleRdfDataSource() throws IOException { + final DataSource fileDataSource = new TridentDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + "tridentTernaryFacts"); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true); + final Set> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + + assertEquals(expectedTernaryQueryResult, queryResult); + } + } + + @Ignore + // Test fails, see https://github.com/karmaresearch/vlog/issues/56 + @Test(expected = IOException.class) + public void tridentDbDoesNotExist_fails() throws IOException { + final File nonexistingFile = new File("nonexisting"); + assertFalse(nonexistingFile.exists()); + final DataSource dataSource = new TridentDataSource(nonexistingFile.getName()); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, dataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + } + } + +} From 27d823062724c976f05841e401f0c2e6b8cd9577 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 16:22:49 +0200 Subject: [PATCH 0741/1003] remove unused imports --- 
.../rulewerk/reasoner/vlog/VLogReasonerTridentInput.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java index 75e433e1a..9cf044481 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java @@ -40,8 +40,6 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; public class VLogReasonerTridentInput { From a2de5b03180ba27019e59d8a2f6a51f3a037a4e3 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 Aug 2020 17:25:16 +0200 Subject: [PATCH 0742/1003] removed PromptProvider interface to simplify code --- .../rulewerk/client/shell/CommandReader.java | 9 +++--- .../client/shell/DefaultConfiguration.java | 5 ++-- .../client/shell/InteractiveShell.java | 3 +- .../rulewerk/client/shell/PromptProvider.java | 28 ------------------- 4 files changed, 9 insertions(+), 36 deletions(-) delete mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index 18400e686..5871389be 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -33,13 +33,13 @@ public class CommandReader { private final LineReader lineReader; - private final PromptProvider promptProvider; + private final AttributedString prompt; private final Interpreter interpreter; - public CommandReader(final LineReader lineReader, final PromptProvider promptProvider, + public CommandReader(final LineReader lineReader, final AttributedString prompt, final Interpreter interpreter) { this.lineReader = lineReader; - this.promptProvider = promptProvider; + this.prompt = prompt; this.interpreter = interpreter; } @@ -54,8 +54,7 @@ public CommandReader(final LineReader lineReader, final PromptProvider promptPro public Command readCommand() { String readLine; try { - final AttributedString prompt = this.promptProvider.getPrompt(); - readLine = this.lineReader.readLine(prompt.toAnsi(this.lineReader.getTerminal())); + readLine = this.lineReader.readLine(this.prompt.toAnsi(this.lineReader.getTerminal())); } catch (final UserInterruptException e) { if (e.getPartialLine().isEmpty()) { // Exit request from user CTRL+C diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index e9cf0438f..0a5a2c688 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -43,10 +43,11 @@ 
public final class DefaultConfiguration { private DefaultConfiguration() { } - public static PromptProvider buildPromptProvider() { - return () -> new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); + public static AttributedString buildPromptProvider() { + return new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); } + public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) .appName("Rulewerk Shell").completer(buildCompleter(interpreter)) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 0bee3a90c..877cd19ad 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -24,6 +24,7 @@ import org.jline.reader.LineReader; import org.jline.terminal.Terminal; +import org.jline.utils.AttributedString; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -51,7 +52,7 @@ public static void run() throws IOException { final Shell shell = new Shell(interpreter); final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); - final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); + final AttributedString promptProvider = DefaultConfiguration.buildPromptProvider(); final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); shell.run(commandReader); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java deleted file mode 100644 index ff5fd6ea4..000000000 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.jline.utils.AttributedString; - -public interface PromptProvider { - - AttributedString getPrompt(); -} From b52726e1ad0c784d19e2b7751156bfe216cf8c12 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 17:25:33 +0200 Subject: [PATCH 0743/1003] support loading OWL files --- rulewerk-commands/pom.xml | 10 +++ .../commands/LoadCommandInterpreter.java | 75 ++++++++++++++++++- .../src/test/data/loadtest-fails.owl | 1 + rulewerk-commands/src/test/data/loadtest.owl | 3 + .../commands/LoadCommandInterpreterTest.java | 56 ++++++++++++++ 5 files changed, 142 insertions(+), 3 deletions(-) create mode 100644 rulewerk-commands/src/test/data/loadtest-fails.owl create mode 100644 rulewerk-commands/src/test/data/loadtest.owl diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml index 9e4eb1374..97509c6c6 100644 --- a/rulewerk-commands/pom.xml +++ b/rulewerk-commands/pom.xml @@ -27,5 +27,15 @@ rulewerk-parser ${project.version} + + ${project.groupId} + rulewerk-owlapi + ${project.version} + + + ${project.groupId} + rulewerk-rdf + ${project.version} + diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index b8b74b0b8..5d245cc4e 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.commands; +import java.io.File; + /*- * #%L * Rulewerk Core Components @@ -23,17 +25,62 @@ import java.io.FileNotFoundException; import java.io.InputStream; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; +/** + * Interpreter for the load command. 
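For orientation, the shell usage enabled by this change looks roughly as follows; this is only an illustrative sketch, the file names are placeholders, and the exact syntax is the one exercised by the tests added later in this same patch:

    @load 'kb.rls' .
    @load RULES 'kb.rls' .
    @load OWL 'ontology.owl' .
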
+ * + * @author Markus Kroetzsch + * + */ public class LoadCommandInterpreter implements CommandInterpreter { + static final String TASK_RLS = "RULES"; + static final String TASK_OWL = "OWL"; + static final String TASK_RDF = "RDF"; + @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - Interpreter.validateArgumentCount(command, 1); - String fileName = Interpreter.extractStringArgument(command, 0, "filename"); + String task; + int pos = 0; + if (command.getArguments().size() > 0 && command.getArguments().get(0).fromTerm().isPresent() + && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { + task = Interpreter.extractNameArgument(command, 0, "task"); + Interpreter.validateArgumentCount(command, 2); + pos++; + } else { + task = TASK_RLS; + Interpreter.validateArgumentCount(command, 1); + } + + String fileName = Interpreter.extractStringArgument(command, pos, "filename"); + + int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); + int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); + + if (TASK_RLS.equals(task)) { + loadKb(interpreter, fileName); + } else if (TASK_OWL.equals(task)) { + loadOwl(interpreter, fileName); + } else { + throw new CommandExecutionException("Unknown task " + task + ". Should be " + TASK_RLS + " or " + TASK_OWL); + } + interpreter.printNormal( + "Loaded " + (interpreter.getKnowledgeBase().getFacts().size() - countFactsBefore) + " new fact(s) and " + + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + " new rule(s)\n"); + + } + + private void loadKb(Interpreter interpreter, String fileName) throws CommandExecutionException { try { InputStream inputStream = interpreter.getFileInputStream(fileName); RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream); @@ -44,9 +91,31 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } + private void loadOwl(Interpreter interpreter, String fileName) throws CommandExecutionException { + final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); + OWLOntology ontology; + try { + ontology = ontologyManager.loadOntologyFromOntologyDocument(new File(fileName)); + } catch (OWLOntologyCreationException e) { + throw new CommandExecutionException("Problem loading OWL ontology: " + e.getMessage(), e); + } + interpreter.printNormal( + "Found OWL ontology with " + ontology.getLogicalAxiomCount() + " logical OWL axioms ...\n"); + + final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); + owlToRulesConverter.addOntology(ontology); + + interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getRules()); + interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); + } + @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " \n" + " file: path to a Rulewerk rls file\n"); + interpreter.printNormal("Usage: @" + commandName + " [TASK] \n" // + + " file: path to the file to load\n" // + + " TASK: optional; one of RULES (default) or OWL:\n" // + + " RULES to load a knowledge base in Rulewerk rls format\n" // + + " OWL to load an OWL ontology and convert it to rules\n"); } @Override diff --git a/rulewerk-commands/src/test/data/loadtest-fails.owl b/rulewerk-commands/src/test/data/loadtest-fails.owl new file mode 100644 index 000000000..3c8a426bb --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest-fails.owl @@ 
-0,0 +1 @@ +this is not a valid OWL file diff --git a/rulewerk-commands/src/test/data/loadtest.owl b/rulewerk-commands/src/test/data/loadtest.owl new file mode 100644 index 000000000..3c107a48f --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest.owl @@ -0,0 +1,3 @@ +@prefix : . + +:a rdf:type owl:NamedIndividual, :C . diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index 5ed5a0f14..92613aaca 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -65,6 +65,53 @@ public void correctUse_succeeds() throws ParsingException, CommandExecutionExcep assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } + @Test + public void correctUseWithRulesTask_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + InputStream inputStream = new ByteArrayInputStream("p(a) .".getBytes(StandardCharsets.UTF_8)); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + + Command command = interpreter.parseCommand("@load RULES 'loadtest.rls' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithOwlTask_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("http://example.org/C", 1); + Term term = Expressions.makeAbstractConstant("http://example.org/a"); + Fact fact = Expressions.makeFact(predicate, term); + + Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest.owl' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseWithOwlTask_malformedOwl_fails() + throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest-fails.owl' ."); + interpreter.runCommand(command); + } + @Test(expected = CommandExecutionException.class) public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); @@ -106,6 +153,15 @@ public void wrongArgumentType_fails() throws ParsingException, CommandExecutionE 
interpreter.runCommand(command); } + @Test(expected = CommandExecutionException.class) + public void wrongTask_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load UNKOWNTASK 'loadtest.rls' ."); + interpreter.runCommand(command); + } + @Test public void help_succeeds() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); From c2dbe2a0ced6947e17fe669f76c8b658abe4489b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 Aug 2020 17:33:30 +0200 Subject: [PATCH 0744/1003] use printNormal method for printing parsing error messages --- .../semanticweb/rulewerk/client/shell/CommandReader.java | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index 5871389be..298127a95 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -36,8 +36,7 @@ public class CommandReader { private final AttributedString prompt; private final Interpreter interpreter; - public CommandReader(final LineReader lineReader, final AttributedString prompt, - final Interpreter interpreter) { + public CommandReader(final LineReader lineReader, final AttributedString prompt, final Interpreter interpreter) { this.lineReader = lineReader; this.prompt = prompt; this.interpreter = interpreter; @@ -82,9 +81,8 @@ public Command readCommand() { try { return this.interpreter.parseCommand(readLine); } catch (final ParsingException e) { - // FIXME do I need to flush terminal? 
- this.lineReader.getTerminal().writer() - .println("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); + this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); + return null; } } From 118f50d8dc86e5bbbdb48e0123946b9b384bf260 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 18:11:45 +0200 Subject: [PATCH 0745/1003] better handling of unsupported OWL axioms --- .../commands/LoadCommandInterpreter.java | 15 +++- .../src/test/data/loadtest-unsupported.owl | 5 ++ rulewerk-commands/src/test/data/loadtest.owl | 3 +- .../commands/LoadCommandInterpreterTest.java | 19 +++++ .../rulewerk/owlapi/OwlToRulesConverter.java | 72 ++++++++++++++++++- 5 files changed, 110 insertions(+), 4 deletions(-) create mode 100644 rulewerk-commands/src/test/data/loadtest-unsupported.owl diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 5d245cc4e..3dda339e4 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -76,7 +76,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio interpreter.printNormal( "Loaded " + (interpreter.getKnowledgeBase().getFacts().size() - countFactsBefore) + " new fact(s) and " - + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + " new rule(s)\n"); + + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + " new rule(s).\n"); } @@ -102,8 +102,19 @@ private void loadOwl(Interpreter interpreter, String fileName) throws CommandExe interpreter.printNormal( "Found OWL ontology with " + ontology.getLogicalAxiomCount() + " logical OWL axioms ...\n"); - final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); + final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(false); owlToRulesConverter.addOntology(ontology); + if (owlToRulesConverter.getUnsupportedAxiomsCount() > 0) { + interpreter.printImportant("Warning: Some OWL axioms could not be converted to rules.\n"); + owlToRulesConverter.getUnsupportedAxiomsSample() + .forEach((owlAxiom) -> interpreter.printNormal(owlAxiom.toString() + "\n")); + if (owlToRulesConverter.getUnsupportedAxiomsSample().size() < owlToRulesConverter + .getUnsupportedAxiomsCount()) { + interpreter.printNormal("...\n"); + } + interpreter.printNormal("Encountered " + owlToRulesConverter.getUnsupportedAxiomsCount() + + " unsupported logical axioms in total.\n"); + } interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getRules()); interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); diff --git a/rulewerk-commands/src/test/data/loadtest-unsupported.owl b/rulewerk-commands/src/test/data/loadtest-unsupported.owl new file mode 100644 index 000000000..405a2c4a5 --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest-unsupported.owl @@ -0,0 +1,5 @@ +@prefix : . +@prefix owl: . + +:a a owl:NamedIndividual, :C . +:p a owl:ObjectProperty, owl:InverseFunctionalProperty . \ No newline at end of file diff --git a/rulewerk-commands/src/test/data/loadtest.owl b/rulewerk-commands/src/test/data/loadtest.owl index 3c107a48f..9b9d9f270 100644 --- a/rulewerk-commands/src/test/data/loadtest.owl +++ b/rulewerk-commands/src/test/data/loadtest.owl @@ -1,3 +1,4 @@ @prefix : . 
+@prefix owl: . -:a rdf:type owl:NamedIndividual, :C . +:a a owl:NamedIndividual, :C . diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index 92613aaca..b29f99f14 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -101,6 +101,25 @@ public void correctUseWithOwlTask_succeeds() throws ParsingException, CommandExe assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } + + @Test + public void correctUseWithOwlTask_UnsupportedAxioms_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("http://example.org/C", 1); + Term term = Expressions.makeAbstractConstant("http://example.org/a"); + Fact fact = Expressions.makeFact(predicate, term); + + Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest-unsupported.owl' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + // OUtput mentions the offending axiom in Functional-Style Syntax: + assertTrue(writer.toString().contains("InverseFunctionalObjectProperty()")); + } @Test(expected = CommandExecutionException.class) public void correctUseWithOwlTask_malformedOwl_fails() diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java index f13f724fe..ec8c61a55 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java @@ -1,5 +1,8 @@ package org.semanticweb.rulewerk.owlapi; +import java.util.ArrayList; +import java.util.List; + /*- * #%L * Rulewerk OWL API Support @@ -22,9 +25,12 @@ import java.util.Set; +import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Rule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class for converting OWL ontologies to rules. @@ -34,8 +40,35 @@ */ public class OwlToRulesConverter { + private static Logger LOGGER = LoggerFactory.getLogger(OwlToRulesConverter.class); + final OwlAxiomToRulesConverter owlAxiomToRulesConverter = new OwlAxiomToRulesConverter(); + private final boolean failOnUnsupported; + private int unsupportedAxiomsCount = 0; + private final List unsupportedAxioms = new ArrayList<>(); + + /** + * Constructor. + * + * @param failOnUnsupported whether the converter should fail with an + * {@link OwlFeatureNotSupportedException} when + * encountering axioms that cannot be converted to + * rules or facts. 
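As a side note, the following self-contained sketch shows one way the new lenient mode could be used programmatically. It relies only on methods visible in this patch (addOntology, getUnsupportedAxiomsCount, getUnsupportedAxiomsSample, getRules, getFacts); the class name OwlImportSketch and the idea of returning a KnowledgeBase are illustrative assumptions, not part of the patch.

import java.io.File;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter;

public class OwlImportSketch {

	// Loads an OWL file and converts it to rules and facts without failing on
	// unsupported axioms; offending axioms are merely reported.
	public static KnowledgeBase convert(final File owlFile) throws OWLOntologyCreationException {
		final OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
		final OWLOntology ontology = manager.loadOntologyFromOntologyDocument(owlFile);

		final OwlToRulesConverter converter = new OwlToRulesConverter(false); // lenient mode
		converter.addOntology(ontology);
		if (converter.getUnsupportedAxiomsCount() > 0) {
			// at most ten unsupported axioms are retained as a sample
			converter.getUnsupportedAxiomsSample().forEach(axiom -> System.out.println(axiom));
		}

		final KnowledgeBase kb = new KnowledgeBase();
		kb.addStatements(converter.getRules());
		kb.addStatements(converter.getFacts());
		return kb;
	}
}
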
+ */ + public OwlToRulesConverter(boolean failOnUnsupported) { + this.failOnUnsupported = failOnUnsupported; + } + + /** + * Constructs an object that fails with a + * {@link OwlFeatureNotSupportedException} when encountering axioms that cannot + * be converted to rules or facts. + */ + public OwlToRulesConverter() { + this(true); + } + /** * Converts the given OWL ontology to rules and facts, and adds the result to * the internal buffer of rules and facts for later retrieval. @@ -44,7 +77,22 @@ public class OwlToRulesConverter { */ public void addOntology(final OWLOntology owlOntology) { this.owlAxiomToRulesConverter.startNewBlankNodeContext(); - owlOntology.axioms().forEach(owlAxiom -> owlAxiom.accept(this.owlAxiomToRulesConverter)); + owlOntology.axioms().forEach(owlAxiom -> { + try { + owlAxiom.accept(this.owlAxiomToRulesConverter); + } catch (OwlFeatureNotSupportedException e) { + if (failOnUnsupported) { + LOGGER.error(e.getMessage()); + throw e; + } else { + LOGGER.warn(e.getMessage()); + unsupportedAxiomsCount++; + if (unsupportedAxioms.size() < 10) { + unsupportedAxioms.add(owlAxiom); + } + } + } + }); } /** @@ -69,4 +117,26 @@ public Set getRules() { return this.owlAxiomToRulesConverter.rules; } + /** + * Returns the number of OWL axioms that could not be converted into rules. This + * number is only computed if the object is not configured to fail when + * encountering the first unsupported axiom. + * + * @return total number of unsupported axioms + */ + public int getUnsupportedAxiomsCount() { + return unsupportedAxiomsCount; + } + + /** + * Returns up to 10 unsupported axioms encountered during the conversion. The + * complete number of unsupported axioms can be queried using + * {@link #getUnsupportedAxiomsCount()}. + * + * @return list of first ten unsupported axioms that were encountered + */ + public List getUnsupportedAxiomsSample() { + return unsupportedAxioms; + } + } From b95deec26a1ccc6575705eb689394113f94a6577 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 18:13:56 +0200 Subject: [PATCH 0746/1003] also test handling of missing file --- .../rulewerk/commands/LoadCommandInterpreterTest.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index b29f99f14..52daa0c6a 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -130,6 +130,16 @@ public void correctUseWithOwlTask_malformedOwl_fails() Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest-fails.owl' ."); interpreter.runCommand(command); } + + @Test(expected = CommandExecutionException.class) + public void correctUseWithOwlTask_missingFile_fails() + throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load OWL 'src/test/data/file-does-not-exist.owl' ."); + interpreter.runCommand(command); + } @Test(expected = CommandExecutionException.class) public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException { From e3db16cd71a0a8ee871a4214d119830cbca0eafa Mon Sep 17 00:00:00 2001 From: Irina 
Dragoste Date: Tue, 25 Aug 2020 18:23:44 +0200 Subject: [PATCH 0747/1003] simplify code remove CommandReader --- .../rulewerk/client/shell/CommandReader.java | 102 ------------------ .../client/shell/InteractiveShell.java | 5 +- .../rulewerk/client/shell/Shell.java | 100 ++++++++++++----- 3 files changed, 74 insertions(+), 133 deletions(-) delete mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java deleted file mode 100644 index 298127a95..000000000 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ /dev/null @@ -1,102 +0,0 @@ -package org.semanticweb.rulewerk.client.shell; - -import org.jline.reader.EndOfFileException; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.jline.reader.LineReader; -import org.jline.reader.UserInterruptException; -import org.jline.utils.AttributedString; -import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.parser.ParsingException; - -public class CommandReader { - - private final LineReader lineReader; - private final AttributedString prompt; - private final Interpreter interpreter; - - public CommandReader(final LineReader lineReader, final AttributedString prompt, final Interpreter interpreter) { - this.lineReader = lineReader; - this.prompt = prompt; - this.interpreter = interpreter; - } - - /** - * Reads a command from the prompt and returns a corresponding {@link Command} - * object. If no command should be executed, null is returned. Some effort is - * made to interpret mistyped commands by adding @ and . before and after the - * input, if forgotten. 
- * - * @return command or null - */ - public Command readCommand() { - String readLine; - try { - readLine = this.lineReader.readLine(this.prompt.toAnsi(this.lineReader.getTerminal())); - } catch (final UserInterruptException e) { - if (e.getPartialLine().isEmpty()) { - // Exit request from user CTRL+C - return ExitCommandInterpreter.EXIT_COMMAND; - } else { - return null; // used as empty command - } - } catch (final EndOfFileException e) { - // Exit request from user CTRL+D - return ExitCommandInterpreter.EXIT_COMMAND; - - } - - readLine = readLine.trim(); - if ("".equals(readLine)) { - return null; - } - if (readLine.charAt(0) != '@') { - readLine = "@" + readLine; - } - if (readLine.charAt(readLine.length() - 1) != '.') { - readLine = readLine + " ."; - } - - try { - return this.interpreter.parseCommand(readLine); - } catch (final ParsingException e) { - this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); - - return null; - } - } - -// /** -// * Sanitize the buffer input given the customizations applied to the JLine -// * parser (e.g. support for line continuations, etc.) -// */ -// static List sanitizeInput(List words) { -// words = words.stream().map(s -> s.replaceAll("^\\n+|\\n+$", "")) // CR at beginning/end of line introduced by -// // backslash continuation -// .map(s -> s.replaceAll("\\n+", " ")) // CR in middle of word introduced by return inside a quoted string -// .collect(Collectors.toList()); -// return words; -// } - -} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 877cd19ad..478299580 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -49,13 +49,12 @@ public static void run() throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); final Interpreter interpreter = initializeInterpreter(terminal); - final Shell shell = new Shell(interpreter); final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); final AttributedString promptProvider = DefaultConfiguration.buildPromptProvider(); - final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); - shell.run(commandReader); + final Shell shell = new Shell(lineReader, promptProvider, interpreter); + shell.run(); } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 0768042b6..8754598de 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -1,5 +1,10 @@ package org.semanticweb.rulewerk.client.shell; +import org.jline.reader.EndOfFileException; +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; +import org.jline.utils.AttributedString; + /*- * #%L * Rulewerk Client @@ -26,32 +31,37 @@ import org.semanticweb.rulewerk.commands.CommandInterpreter; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; public class Shell { private final Interpreter interpreter; + private final LineReader lineReader; + 
private final AttributedString prompt; boolean running; - public Shell(final Interpreter interpreter) { + public Shell(final LineReader lineReader, final AttributedString prompt, final Interpreter interpreter) { + this.lineReader = lineReader; + this.prompt = prompt; this.interpreter = interpreter; - CommandInterpreter exitCommandInterpreter = new ExitCommandInterpreter(this); + final CommandInterpreter exitCommandInterpreter = new ExitCommandInterpreter(this); for (final ExitCommandName exitCommandName : ExitCommandName.values()) { interpreter.registerCommandInterpreter(exitCommandName.toString(), exitCommandInterpreter); } } - public void run(final CommandReader commandReader) { - printWelcome(); + public void run() { + this.printWelcome(); - running = true; - while (running) { + this.running = true; + while (this.running) { final Command command; try { - command = commandReader.readCommand(); + command = this.readCommand(); } catch (final Exception e) { - interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); + this.interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); e.printStackTrace(); continue; } @@ -60,11 +70,56 @@ public void run(final CommandReader commandReader) { try { this.interpreter.runCommand(command); } catch (final CommandExecutionException e) { - interpreter.printNormal("Error: " + e.getMessage() + "\n"); + this.interpreter.printNormal("Error: " + e.getMessage() + "\n"); } } } - interpreter.printSection("Exiting Rulewerk shell ... bye.\n\n"); + this.interpreter.printSection("Exiting Rulewerk shell ... bye.\n\n"); + } + + /** + * Reads a command from the prompt and returns a corresponding {@link Command} + * object. If no command should be executed, null is returned. Some effort is + * made to interpret mistyped commands by adding @ and . before and after the + * input, if forgotten. 
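Concretely, the repair just described turns user input into parser input roughly as follows; the left-hand inputs are only examples, and the command names are those advertised by printWelcome below:

    help     is parsed as   @help .
    exit     is parsed as   @exit .
    @exit    is parsed as   @exit .
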
+ * + * @return command or null + */ + public Command readCommand() { + String readLine; + try { + readLine = this.lineReader.readLine(this.prompt.toAnsi(this.lineReader.getTerminal())); + } catch (final UserInterruptException e) { + if (e.getPartialLine().isEmpty()) { + // Exit request from user CTRL+C + return ExitCommandInterpreter.EXIT_COMMAND; + } else { + return null; // used as empty command + } + } catch (final EndOfFileException e) { + // Exit request from user CTRL+D + return ExitCommandInterpreter.EXIT_COMMAND; + + } + + readLine = readLine.trim(); + if ("".equals(readLine)) { + return null; + } + if (readLine.charAt(0) != '@') { + readLine = "@" + readLine; + } + if (readLine.charAt(readLine.length() - 1) != '.') { + readLine = readLine + " ."; + } + + try { + return this.interpreter.parseCommand(readLine); + } catch (final ParsingException e) { + this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); + + return null; + } } public void exitShell() { @@ -72,24 +127,13 @@ public void exitShell() { } private void printWelcome() { - interpreter.printNormal("\n"); - interpreter.printSection("Welcome to the Rulewerk interactive shell.\n"); - interpreter.printNormal("For further information, type "); - interpreter.printCode("@help."); - interpreter.printNormal(" To quit, type "); - interpreter.printCode("@exit.\n"); - interpreter.printNormal("\n"); + this.interpreter.printNormal("\n"); + this.interpreter.printSection("Welcome to the Rulewerk interactive shell.\n"); + this.interpreter.printNormal("For further information, type "); + this.interpreter.printCode("@help."); + this.interpreter.printNormal(" To quit, type "); + this.interpreter.printCode("@exit.\n"); + this.interpreter.printNormal("\n"); } -// @Override -// public void handleResult(final Object result) { -// this.terminal.writer().println(result); -// this.terminal.writer().flush(); -// } - -// @Override -// public void handleResult(final AttributedCharSequence result) { -// this.terminal.writer().println(result.toAnsi(this.terminal)); -// this.terminal.writer().flush(); -// } } From 90fbe1ad895eb21bcf0a3566d44a2bbe9d7bc4a4 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 22:59:25 +0200 Subject: [PATCH 0748/1003] handle plain "string" constants these were accidentally converted to abstract constants before; xsd:string seems more appropriate --- .../vlog/VLogFastQueryResultIterator.java | 4 ++-- .../reasoner/vlog/VLogToModelConverter.java | 11 ++++++++++- .../vlog/VLogToModelConverterTest.java | 19 ++++++++----------- 3 files changed, 20 insertions(+), 14 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 17acf658e..33e88a5d6 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -117,8 +117,8 @@ public void put(long id, Term term) { */ private final VLog vLog; /** - * VLog ids of the previous tuple, with the last id fixed to -1 (since it is never - * useful in caching). + * VLog ids of the previous tuple, with the last id fixed to -1 (since it is + * never useful in caching). 
*/ private long[] prevIds = null; /** diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index b237dc947..0824c0d73 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -23,7 +23,9 @@ import java.util.ArrayList; import java.util.List; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; @@ -117,8 +119,15 @@ static Constant toConstant(String vLogConstantName) { final String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); final String string = vLogConstantName.substring(1, startTypeIdx - 1); constant = new LanguageStringConstantImpl(string, languageTag); + } else if (vLogConstantName.charAt(vLogConstantName.length() - 1) == '"' + && vLogConstantName.length() > 1) { + // This is already an unexpceted case. Untyped strings "constant" should not + // occur. But if they do, this is our best guess on how to interpret them. + constant = new DatatypeConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1), + PrefixDeclarationRegistry.XSD_STRING); } else { - constant = new AbstractConstantImpl(vLogConstantName); + throw new RulewerkRuntimeException("VLog returned a constant name '" + vLogConstantName + + "' that Rulewerk cannot make sense of."); } } } else { diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java index 2e2db1b5c..3f0dd88a5 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java @@ -20,10 +20,8 @@ * #L% */ import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; @@ -34,7 +32,8 @@ public class VLogToModelConverterTest { @Test public void testAbstractConstantConversion() { - final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"); + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + "c"); final Term rulewerkTerm = new AbstractConstantImpl("c"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(rulewerkTerm, convertedTerm); @@ -69,7 +68,8 @@ public void testLanguageStringConversion() { @Test public void testNamedNullConversion() { - final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_123"); + final karmaresearch.vlog.Term vLogTerm = new 
karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, + "_123"); final Term rulewerkTerm = new NamedNullImpl("_123"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(rulewerkTerm, convertedTerm); @@ -77,18 +77,15 @@ public void testNamedNullConversion() { @Test(expected = IllegalArgumentException.class) public void testVariableConversion() { - final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "X"); + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "X"); VLogToModelConverter.toTerm(vLogTerm); } - @Test + @Test(expected = RuntimeException.class) public void testAbstractConstantContainingQuoteExpression() { final String constName = "\""; - final Term convertedTerm = VLogToModelConverter - .toTerm(new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, constName)); - assertTrue(convertedTerm.isConstant()); - assertTrue(convertedTerm instanceof AbstractConstant); - assertEquals(constName, convertedTerm.getName()); + VLogToModelConverter.toTerm(new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, constName)); } } From 0e49429ab6cf71936f8763a223b10ab7946a459b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 09:59:55 +0200 Subject: [PATCH 0749/1003] support loading RDF files --- .../commands/LoadCommandInterpreter.java | 71 ++++++++++- rulewerk-commands/src/test/data/loadtest.nt | 1 + rulewerk-commands/src/test/data/loadtest.rdf | 9 ++ rulewerk-commands/src/test/data/loadtest.ttl | 3 + .../commands/LoadCommandInterpreterTest.java | 112 +++++++++++++++--- rulewerk-rdf/pom.xml | 1 - 6 files changed, 180 insertions(+), 17 deletions(-) create mode 100644 rulewerk-commands/src/test/data/loadtest.nt create mode 100644 rulewerk-commands/src/test/data/loadtest.rdf create mode 100644 rulewerk-commands/src/test/data/loadtest.ttl diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 3dda339e4..56ed2d104 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -23,17 +23,33 @@ */ import java.io.FileNotFoundException; +import java.io.IOException; import java.io.InputStream; - +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.openrdf.model.Model; +import org.openrdf.model.Namespace; +import org.openrdf.model.impl.LinkedHashModel; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.openrdf.rio.RDFParser; +import org.openrdf.rio.Rio; +import org.openrdf.rio.helpers.StatementCollector; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; import org.semanticweb.rulewerk.parser.ParsingException; import 
org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.rdf.RdfModelConverter; /** * Interpreter for the load command. @@ -70,6 +86,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio loadKb(interpreter, fileName); } else if (TASK_OWL.equals(task)) { loadOwl(interpreter, fileName); + } else if (TASK_RDF.equals(task)) { + loadRdf(interpreter, fileName); } else { throw new CommandExecutionException("Unknown task " + task + ". Should be " + TASK_RLS + " or " + TASK_OWL); } @@ -87,7 +105,7 @@ private void loadKb(Interpreter interpreter, String fileName) throws CommandExec } catch (FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); } catch (ParsingException e) { - throw new CommandExecutionException("Error parsing file: " + e.getMessage(), e); + throw new CommandExecutionException("Failed to parse Rulewerk file: " + e.getMessage(), e); } } @@ -120,6 +138,55 @@ private void loadOwl(Interpreter interpreter, String fileName) throws CommandExe interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); } + private void loadRdf(Interpreter interpreter, String fileName) throws CommandExecutionException { + try { + String baseIri = new File(fileName).toURI().toString(); + + Iterator formatsToTry = Arrays.asList(RDFFormat.NTRIPLES, RDFFormat.TURTLE, RDFFormat.RDFXML) + .iterator(); + Model model = null; + List parseErrors = new ArrayList<>(); + while (model == null && formatsToTry.hasNext()) { + RDFFormat rdfFormat = formatsToTry.next(); + try { + InputStream inputStream = interpreter.getFileInputStream(fileName); + model = parseRdfFromStream(inputStream, rdfFormat, baseIri); + interpreter.printNormal("Found RDF document in format " + rdfFormat.getName() + " ...\n"); + } catch (RDFParseException | RDFHandlerException e) { + parseErrors.add("Failed to parse as " + rdfFormat.getName() + ": " + e.getMessage()); + } + } + if (model == null) { + String message = "Failed to parse RDF input:"; + for (String error : parseErrors) { + message += "\n " + error; + } + throw new CommandExecutionException(message); + } + + interpreter.getKnowledgeBase().addStatements(RdfModelConverter.rdfModelToFacts(model)); + for (Namespace namespace : model.getNamespaces()) { + try { + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry() + .setPrefixIri(namespace.getPrefix() + ":", namespace.getName()); + } catch (PrefixDeclarationException e) { + // ignore this prefix + } + } + } catch (IOException e) { + throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); + } + } + + private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, String baseIri) + throws RDFParseException, RDFHandlerException, IOException { + final Model model = new LinkedHashModel(); + final RDFParser rdfParser = Rio.createParser(rdfFormat); + rdfParser.setRDFHandler(new StatementCollector(model)); + rdfParser.parse(inputStream, baseIri); + return model; + } + @Override public void printHelp(String commandName, Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " [TASK] \n" // diff --git a/rulewerk-commands/src/test/data/loadtest.nt b/rulewerk-commands/src/test/data/loadtest.nt new file mode 100644 index 000000000..89536774b --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest.nt @@ -0,0 +1 @@ + . 
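The RDF loading added to LoadCommandInterpreter above guesses the input serialization by attempting N-Triples, Turtle, and RDF/XML parsers in turn, reopening the input for each attempt. The following is a rough standalone sketch of that fallback idea only, using the same Sesame Rio API as the patch; the class name and the way the file is opened are illustrative and not part of the change itself:

```
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;

import org.openrdf.model.Model;
import org.openrdf.model.impl.LinkedHashModel;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.StatementCollector;

/**
 * Illustrative sketch: parse an RDF file of unknown serialization by trying a
 * fixed list of formats until one of them succeeds.
 */
public class RdfFormatFallbackSketch {

	public static Model parseWithFallback(final String fileName, final String baseIri) throws IOException {
		for (final RDFFormat format : Arrays.asList(RDFFormat.NTRIPLES, RDFFormat.TURTLE, RDFFormat.RDFXML)) {
			// reopen the stream for every attempt, since a failed parse may already have consumed input
			try (InputStream inputStream = new FileInputStream(fileName)) {
				final Model model = new LinkedHashModel();
				final RDFParser parser = Rio.createParser(format);
				parser.setRDFHandler(new StatementCollector(model));
				parser.parse(inputStream, baseIri);
				return model; // first format that parses without errors wins
			} catch (RDFParseException | RDFHandlerException e) {
				// not this serialization: try the next format
			}
		}
		throw new IOException("Could not parse " + fileName + " with any of the attempted RDF formats");
	}
}
```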
diff --git a/rulewerk-commands/src/test/data/loadtest.rdf b/rulewerk-commands/src/test/data/loadtest.rdf new file mode 100644 index 000000000..affae3f94 --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest.rdf @@ -0,0 +1,9 @@ + + + + + + + + diff --git a/rulewerk-commands/src/test/data/loadtest.ttl b/rulewerk-commands/src/test/data/loadtest.ttl new file mode 100644 index 000000000..3fbe612de --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest.ttl @@ -0,0 +1,3 @@ +@prefix : . + +:a :b :c . diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index 52daa0c6a..ded18aa69 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -32,6 +32,7 @@ import org.junit.Test; import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Predicate; @@ -85,6 +86,29 @@ public void correctUseWithRulesTask_succeeds() throws ParsingException, CommandE assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } + @Test(expected = CommandExecutionException.class) + public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + InputStream inputStream = new ByteArrayInputStream("not parsable".getBytes(StandardCharsets.UTF_8)); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseFileNotFoundError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + } + @Test public void correctUseWithOwlTask_succeeds() throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); @@ -101,9 +125,10 @@ public void correctUseWithOwlTask_succeeds() throws ParsingException, CommandExe assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } - + @Test - public void correctUseWithOwlTask_UnsupportedAxioms_succeeds() throws ParsingException, CommandExecutionException, IOException { + public void correctUseWithOwlTask_UnsupportedAxioms_succeeds() + throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); @@ -130,7 +155,7 @@ public void 
correctUseWithOwlTask_malformedOwl_fails() Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest-fails.owl' ."); interpreter.runCommand(command); } - + @Test(expected = CommandExecutionException.class) public void correctUseWithOwlTask_missingFile_fails() throws ParsingException, CommandExecutionException, IOException { @@ -141,26 +166,85 @@ public void correctUseWithOwlTask_missingFile_fails() interpreter.runCommand(command); } + @Test + public void correctUseWithRdfTask_Nt_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.nt' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithRdfTask_Turtle_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.ttl' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertEquals("http://example.org/", interpreter.getKnowledgeBase().getPrefixIri(":")); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithRdfTask_RdfXml_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.rdf' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertEquals("http://example.org/", interpreter.getKnowledgeBase().getPrefixIri("eg:")); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + 
assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + @Test(expected = CommandExecutionException.class) - public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException { + public void correctUseWithRdfTask_malformedRdf_fails() + throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); - InputStream inputStream = new ByteArrayInputStream("not parsable".getBytes(StandardCharsets.UTF_8)); - Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); - Interpreter interpreter = Mockito.spy(origInterpreter); - Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest-fails.owl' ."); interpreter.runCommand(command); } @Test(expected = CommandExecutionException.class) - public void correctUseFileNotFoundError_fails() throws ParsingException, CommandExecutionException, IOException { + public void correctUseWithRdfTask_missingFile_fails() + throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); - Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); - Interpreter interpreter = Mockito.spy(origInterpreter); - Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + Command command = interpreter.parseCommand("@load RDF 'src/test/data/file-does-not-exist.rdf' ."); interpreter.runCommand(command); } diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index 16e796b43..fe97c337b 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -46,7 +46,6 @@ org.openrdf.sesame sesame-rio-turtle ${openrdf.sesame.version} - test From 391e4cf22f25af9789fe6d9f98e1712dbcb0d1bb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 10:15:18 +0200 Subject: [PATCH 0750/1003] updated help --- .../rulewerk/commands/LoadCommandInterpreter.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 56ed2d104..3fc22b564 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -89,7 +89,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } else if (TASK_RDF.equals(task)) { loadRdf(interpreter, fileName); } else { - throw new CommandExecutionException("Unknown task " + task + ". Should be " + TASK_RLS + " or " + TASK_OWL); + throw new CommandExecutionException( + "Unknown task " + task + ". 
Should be one of " + TASK_RLS + ", " + TASK_OWL + ", " + TASK_RDF); } interpreter.printNormal( @@ -191,14 +192,15 @@ private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, S public void printHelp(String commandName, Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " [TASK] \n" // + " file: path to the file to load\n" // - + " TASK: optional; one of RULES (default) or OWL:\n" // + + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // - + " OWL to load an OWL ontology and convert it to rules\n"); + + " OWL to load an OWL ontology and convert it to facts and rules\n" // + + " RDF to load an RDF document and convert it to facts for predicate TRIPLE[3]\n"); } @Override public String getSynopsis() { - return "load a knowledge base from file (in Rulewerk rls format)"; + return "load a knowledge base from file (in Rulewerk format, OWL, or RDF)"; } } From 33e39b8cf6f851f40deb8911a9fd2007e540b1a6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 10:26:38 +0200 Subject: [PATCH 0751/1003] add missing \n after error --- .../main/java/org/semanticweb/rulewerk/client/shell/Shell.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 8754598de..a7b777f9c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -116,8 +116,7 @@ public Command readCommand() { try { return this.interpreter.parseCommand(readLine); } catch (final ParsingException e) { - this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); - + this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage() + "\n"); return null; } } From 0d120f15139d04381c150584043d668174b47e60 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 10:37:53 +0200 Subject: [PATCH 0752/1003] support @retract predicate[arity]. 
--- .../commands/AddSourceCommandInterpreter.java | 4 +- .../commands/RetractCommandInterpreter.java | 15 ++++++-- .../RetractCommandInterpreterTest.java | 38 ++++++++++++++++++- 3 files changed, 50 insertions(+), 7 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index 8f8a2e4b7..bf7e2aad8 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -75,8 +75,8 @@ static Predicate extractPredicate(String predicateDeclaration) throws CommandExe arity = Integer.parseInt(arityString); } catch (IndexOutOfBoundsException | NumberFormatException e) { throw new CommandExecutionException( - "Predicate declaration must have the format \"predicateName[number]\" but was " - + predicateDeclaration); + "Predicate declaration must have the format \"predicateName[number]\" but was \"" + + predicateDeclaration + "\"."); } return Expressions.makePredicate(predicateName, arity); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java index 9acdbb4a3..2e74580c8 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -24,6 +24,7 @@ import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class RetractCommandInterpreter implements CommandInterpreter { @@ -44,9 +45,14 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio factCount += interpreter.getKnowledgeBase().removeStatement(fact); } else if (argument.fromRule().isPresent()) { ruleCount += interpreter.getKnowledgeBase().removeStatement(argument.fromRule().get()); - } else { - throw new CommandExecutionException( - "Only facts and rules can be retracted. 
Encountered " + argument.toString()); + } else { // implies argument.fromTerm().isPresent() + String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); + for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { + if (predicate.equals(fact.getPredicate())) { + factCount += interpreter.getKnowledgeBase().removeStatement(fact); + } + } } } @@ -56,7 +62,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio @Override public void printHelp(String commandName, Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " ()+ .\n" - + " fact or rule: statement(s) to be removed from the knowledge base\n" + + " fact or rule: statement(s) to be removed from the knowledge base, or a predicate declaration\n" + + " of the form name[arity] to remove all facts for that predicate.\n" + "Reasoning needs to be invoked after finishing the removal of statements.\n"); } diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java index 3381ac9ec..f9a8189cc 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java @@ -72,9 +72,45 @@ public void correctUse_succeeds() throws ParsingException, CommandExecutionExcep assertTrue(rules.isEmpty()); assertTrue(dataSourceDeclarations.isEmpty()); } + + @Test + public void correctUse_retractPredicate_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Term a = Expressions.makeAbstractConstant("a"); + Term b = Expressions.makeAbstractConstant("b"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Fact pa = Expressions.makeFact(p, a); + Fact pb = Expressions.makeFact(p, b); + Fact qa = Expressions.makeFact(q, a); + + interpreter.getKnowledgeBase().addStatement(pa); + interpreter.getKnowledgeBase().addStatement(pb); + interpreter.getKnowledgeBase().addStatement(qa); + + Command command = interpreter.parseCommand("@retract p[1] ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals(Arrays.asList(qa), facts); + assertTrue(rules.isEmpty()); + assertTrue(dataSourceDeclarations.isEmpty()); + } @Test(expected = CommandExecutionException.class) - public void wrongArgumentTerm_fails() throws ParsingException, CommandExecutionException { + public void wrongArgumentTermNumber_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract 42 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentTermStringNoPredicate_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); Interpreter interpreter = 
InterpreterTest.getMockInterpreter(writer); From 5fd4a456e681cd45c7eb56ce23cff97ba931de25 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 26 Aug 2020 12:39:34 +0200 Subject: [PATCH 0753/1003] command parsing error print new line --- .../semanticweb/rulewerk/client/shell/DefaultConfiguration.java | 2 ++ .../main/java/org/semanticweb/rulewerk/client/shell/Shell.java | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 0a5a2c688..c53ad76be 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -57,8 +57,10 @@ public static LineReader buildLineReader(final Terminal terminal, final Interpre ; final LineReader lineReader = lineReaderBuilder.build(); + lineReader.unsetOpt(LineReader.Option.INSERT_TAB); // This allows completion on an empty buffer, rather than // inserting a tab + lineReader.setOpt(LineReader.Option.AUTO_FRESH_LINE); return lineReader; } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 8754598de..889db2e81 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -116,7 +116,7 @@ public Command readCommand() { try { return this.interpreter.parseCommand(readLine); } catch (final ParsingException e) { - this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); + this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage() + "\n"); return null; } From ce8331263f5f6b6887c0224df9ce1771ceb83fef Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 13:30:51 +0200 Subject: [PATCH 0754/1003] expanded Skolemization features --- .../implementation/Skolemization.java | 103 ++++++++++++++++-- .../implementation/SkolemizationTest.java | 54 ++++++--- .../owlapi/OwlToRulesConversionHelper.java | 2 +- .../parser/javacc/JavaCCParserBase.java | 2 +- .../reasoner/vlog/TermToVLogConverter.java | 2 +- 5 files changed, 133 insertions(+), 30 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index b0bc00877..35859a969 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -23,36 +23,117 @@ import java.io.ByteArrayOutputStream; import java.util.UUID; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; /** - * A class that implements skolemization of named null names. 
The same name - * should be skolemized to the same {@link NamedNull} when skolemized using the - * same instance, but to two different instances of {@link NamedNull} when - * skolemized using different instances of {@link Skolemization}. + * A class that implements skolemization and collision-free renaming of named + * nulls. The same name will always be renamed in the same way when using the + * same instance of {@link Skolemization}, but it is extremely unlikely that + * different names or different instances will ever produce the same name. + * + * This can be used to rename apart named nulls from different input sources to + * avoid clashes. There is also code for creating skolem constants with + * appropriate absolute IRIs. * * @author Maximilian Marx */ public class Skolemization { + + /** + * IRI prefix used for IRIs skolem constants in Rulewerk. + */ + public final static String SKOLEM_IRI_PREFIX = "https://rulewerk.semantic-web.org/.well-known/genid/"; + /** + * Prefix used to ensure that UUID-based local names do not start with a number. + */ + private final static String SKOLEM_UUID_START = "ID"; + /** * The namespace to use for skolemizing named null names. */ private final byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); /** - * Skolemize a named null name. The same {@code name} will map to a - * {@link RenamedNamedNull} instance with the same name when called on the same - * instance. + * Creates a named null with a renamed name that is determined by the given + * original name. The result is a {@link RenamedNamedNull} to allow other code + * to recognise that no further renaming is necessary. * - * @return a {@link RenamedNamedNull} instance with a new name that is specific - * to this instance and {@code name}. + * @param name the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) + * @return a {@link RenamedNamedNull} with a new name that is specific to this + * instance and {@code name}. + */ + public RenamedNamedNull getRenamedNamedNull(String name) { + return new RenamedNamedNull(getFreshName(name)); + } + + /** + * Creates a skolem constant that is determined by the given original name. + * + * @param name the name of the {@link NamedNull} to skolemize (or any other + * string for which to create a unique renaming) + * @return a {@link AbstractConstant} with an IRI that is specific to this + * instance and {@code name}. + */ + public AbstractConstant getSkolemConstant(String name) { + return new AbstractConstantImpl(getSkolemConstantName(name)); + } + + /** + * Creates a skolem constant that is determined by the given {@link NamedNull}. + * The method ensures that a new unique name is generated unless the given + * object is already a {@link RenamedNamedNull}. + * + * @param namedNull the {@link NamedNull} to skolemize + * @return a {@link AbstractConstant} with an IRI that is specific to this + * instance and {@code namedNull}. + */ + public AbstractConstant getSkolemConstant(NamedNull namedNull) { + if (namedNull instanceof RenamedNamedNull) { + return new AbstractConstantImpl(getSkolemConstantNameFromUniqueName(namedNull.getName())); + } else { + return new AbstractConstantImpl(getSkolemConstantName(namedNull.getName())); + } + } + + /** + * Returns the name (IRI string) of a skolem constant for skolemising a named + * null of the given name. 
+ * + * @param name the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) + * @return string that is an IRI for a skolem constant + */ + public String getSkolemConstantName(String name) { + return getSkolemConstantNameFromUniqueName(getFreshName(name).toString()); + } + + /** + * Returns a full skolem constant IRI string from its local id part. + * + * @param name local id of skolem constant + * @return IRI string + */ + private String getSkolemConstantNameFromUniqueName(String name) { + return SKOLEM_IRI_PREFIX + SKOLEM_UUID_START + name; + } + + /** + * Creates a fresh UUID based on the given string. The UUID is determined by the + * string and the instance of {@link Skolemization}. Other strings or instances + * are extremely unlikely to produce the same string. + * + * @param name the string to be renamed + * @return a UUID for the new name */ - public RenamedNamedNull skolemizeNamedNull(String name) { + public UUID getFreshName(String name) { byte[] nameBytes = name.getBytes(); ByteArrayOutputStream stream = new ByteArrayOutputStream(); stream.write(namedNullNamespace, 0, namedNullNamespace.length); stream.write(nameBytes, 0, nameBytes.length); - return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); + return UUID.nameUUIDFromBytes(stream.toByteArray()); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java index 51dc0fe67..a382aa220 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java @@ -22,11 +22,11 @@ import static org.junit.Assert.*; -import java.io.IOException; - import org.junit.Before; import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; public class SkolemizationTest { private Skolemization skolemization; @@ -39,38 +39,60 @@ public void init() { } @Test - public void skolemizeNamedNull_sameName_mapsToSameNamedNull() throws IOException { - NamedNull null1 = skolemization.skolemizeNamedNull(name1); - NamedNull null2 = skolemization.skolemizeNamedNull(name1); + public void skolemizeNamedNull_sameName_mapsToSameNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + NamedNull null2 = skolemization.getRenamedNamedNull(name1); assertEquals(null1.getName(), null2.getName()); } @Test - public void skolemizeNamedNull_differentName_mapsToDifferentNamedNull() throws IOException { - NamedNull null1 = skolemization.skolemizeNamedNull(name1); - NamedNull null2 = skolemization.skolemizeNamedNull(name2); + public void skolemizeNamedNull_differentName_mapsToDifferentNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + NamedNull null2 = skolemization.getRenamedNamedNull(name2); assertNotEquals(null1.getName(), null2.getName()); } @Test - public void skolemizeNamedNull_differentInstances_mapsToDifferentNamedNull() throws IOException { - NamedNull null1 = skolemization.skolemizeNamedNull(name1); + public void skolemizeNamedNull_differentInstances_mapsToDifferentNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); Skolemization other = 
new Skolemization(); - NamedNull null2 = other.skolemizeNamedNull(name1); + NamedNull null2 = other.getRenamedNamedNull(name1); assertNotEquals(null1.getName(), null2.getName()); } @Test - public void skolemizeNamedNull_differentInstancesDifferentNames_mapsToDifferentNamedNull() throws IOException { - NamedNull null1 = skolemization.skolemizeNamedNull(name1); + public void skolemizeNamedNull_differentInstancesDifferentNames_mapsToDifferentNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); Skolemization other = new Skolemization(); - NamedNull null2 = other.skolemizeNamedNull(name2); + NamedNull null2 = other.getRenamedNamedNull(name2); assertNotEquals(null1.getName(), null2.getName()); - assertEquals(null1.getName(), skolemization.skolemizeNamedNull(name1).getName()); - assertEquals(null2.getName(), other.skolemizeNamedNull(name2).getName()); + assertEquals(null1.getName(), skolemization.getRenamedNamedNull(name1).getName()); + assertEquals(null2.getName(), other.getRenamedNamedNull(name2).getName()); + } + + @Test + public void skolemConstant_succeeds() { + AbstractConstant skolem = skolemization.getSkolemConstant(name1); + assertTrue(skolem.getName().startsWith(Skolemization.SKOLEM_IRI_PREFIX)); + } + + @Test + public void skolemConstantFromNamedNull_succeeds() { + NamedNull null1 = new NamedNullImpl(name1); + AbstractConstant skolem1 = skolemization.getSkolemConstant(null1); + AbstractConstant skolem2 = skolemization.getSkolemConstant(name1); + assertEquals(skolem2, skolem1); + } + + @Test + public void skolemConstantFromRenamedNamedNull_succeeds() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + AbstractConstant skolem1 = skolemization.getSkolemConstant(null1); + AbstractConstant skolem2 = skolemization.getSkolemConstant(name1); + assertEquals(skolem2, skolem1); } } diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index f5c737dc2..7028342cf 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -64,7 +64,7 @@ public static Term getIndividualTerm(final OWLIndividual owlIndividual, Skolemiz if (owlIndividual instanceof OWLNamedIndividual) { return new AbstractConstantImpl(((OWLNamedIndividual) owlIndividual).getIRI().toString()); } else if (owlIndividual instanceof OWLAnonymousIndividual) { - return skolemization.skolemizeNamedNull(((OWLAnonymousIndividual) owlIndividual).getID().toString()); + return skolemization.getRenamedNamedNull(((OWLAnonymousIndividual) owlIndividual).getID().toString()); } else { throw new OwlFeatureNotSupportedException( "Could not convert OWL individual '" + owlIndividual.toString() + "' to a term."); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 9ade274bf..f367bb382 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -162,7 +162,7 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } NamedNull createNamedNull(String lexicalForm) throws ParseException { - return 
this.skolemization.skolemizeNamedNull(lexicalForm); + return this.skolemization.getRenamedNamedNull(lexicalForm); } void addStatement(Statement statement) { diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index 13a078076..84c224460 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -97,7 +97,7 @@ public static String getVLogNameForNamedNull(final NamedNull namedNull) { if (namedNull instanceof RenamedNamedNull) { return namedNull.getName(); } else { - return skolemization.skolemizeNamedNull(namedNull.getName()).getName(); + return skolemization.getRenamedNamedNull(namedNull.getName()).getName(); } } From 8b35c30c64f4592edc441657043d96e038072702 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 13:37:26 +0200 Subject: [PATCH 0755/1003] +method to get skolem name from named null --- .../implementation/Skolemization.java | 24 +++++++++++++++---- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index 35859a969..f8fae3c0f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -92,11 +92,8 @@ public AbstractConstant getSkolemConstant(String name) { * instance and {@code namedNull}. */ public AbstractConstant getSkolemConstant(NamedNull namedNull) { - if (namedNull instanceof RenamedNamedNull) { - return new AbstractConstantImpl(getSkolemConstantNameFromUniqueName(namedNull.getName())); - } else { - return new AbstractConstantImpl(getSkolemConstantName(namedNull.getName())); - } + return new AbstractConstantImpl(getSkolemConstantName(namedNull)); + } /** @@ -111,6 +108,23 @@ public String getSkolemConstantName(String name) { return getSkolemConstantNameFromUniqueName(getFreshName(name).toString()); } + /** + * Returns the name (IRI string) of a skolem constant for skolemising the given + * named {@link NamedNull}. The method ensures that a new unique name is + * generated unless the given object is already a {@link RenamedNamedNull}. + * + * @param name the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) + * @return string that is an IRI for a skolem constant + */ + public String getSkolemConstantName(NamedNull namedNull) { + if (namedNull instanceof RenamedNamedNull) { + return getSkolemConstantNameFromUniqueName(namedNull.getName()); + } else { + return getSkolemConstantName(namedNull.getName()); + } + } + /** * Returns a full skolem constant IRI string from its local id part. 
* From 0e513c57398a8b9de146dbee02b4fc5a2f8d12e7 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 13:52:13 +0200 Subject: [PATCH 0756/1003] changed prefix for local skolem names --- .../rulewerk/core/reasoner/implementation/Skolemization.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index f8fae3c0f..e51a6b5d7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -49,7 +49,7 @@ public class Skolemization { /** * Prefix used to ensure that UUID-based local names do not start with a number. */ - private final static String SKOLEM_UUID_START = "ID"; + private final static String SKOLEM_UUID_START = "B-"; /** * The namespace to use for skolemizing named null names. From 2529528f730d81b3787db28c43192261a6c737d5 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 13:53:05 +0200 Subject: [PATCH 0757/1003] skolemize properly --- .../rulewerk/reasoner/vlog/TermToVLogConverter.java | 7 +------ .../reasoner/vlog/ModelToVLogConverterTest.java | 12 +++++++++--- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index 84c224460..3e294f848 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -30,7 +30,6 @@ import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.TermVisitor; import org.semanticweb.rulewerk.core.model.api.UniversalVariable; -import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; /** @@ -94,11 +93,7 @@ public static String getVLogNameForConstant(final Constant constant) { * @return VLog constant string */ public static String getVLogNameForNamedNull(final NamedNull namedNull) { - if (namedNull instanceof RenamedNamedNull) { - return namedNull.getName(); - } else { - return skolemization.getRenamedNamedNull(namedNull.getName()).getName(); - } + return skolemization.getSkolemConstantName(namedNull); } /** diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java index 653cb5401..70c188ffc 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java @@ -47,6 +47,7 @@ import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; public class ModelToVLogConverterTest { @@ -125,22 +126,26 @@ public void testToVLogTermBlank() { @Test public void 
testToVLogTermBlankSkolemization() { + final Skolemization skolemization = new Skolemization(); final NamedNull blank = new NamedNullImpl("blank"); final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); assertNotEquals("blank", vLogSkolemConstant); - assertEquals(36, vLogSkolemConstant.length()); // length of a UUID + // generated ids differ by Skolemization instance, but should have the same + // length: + assertEquals(skolemization.getSkolemConstantName(blank).length(), vLogSkolemConstant.length()); } @Test public void testToVLogTermBlankRenamedSkolemization() { + final Skolemization skolemization = new Skolemization(); final UUID uuid = UUID.randomUUID(); final NamedNull blank = new RenamedNamedNull(uuid); final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); - assertEquals(uuid.toString(), vLogSkolemConstant); + assertEquals(skolemization.getSkolemConstantName(blank), vLogSkolemConstant); } @Test @@ -192,11 +197,12 @@ public void testToVLogFactTuples() { @Test public void testToVLogFactTupleNulls() { + final Skolemization skolemization = new Skolemization(); final UUID uuid = UUID.randomUUID(); final NamedNull n = new RenamedNamedNull(uuid); final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(n)); - final String[] expectedTuple = { uuid.toString() }; + final String[] expectedTuple = { skolemization.getSkolemConstantName(n) }; final String[] actualTuple = ModelToVLogConverter.toVLogFactTuple(atom1); From fb83ad6ef382192428dde0d56d05e9aa8e5f12dd Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 14:47:36 +0200 Subject: [PATCH 0758/1003] make RDF converter non-static --- .../commands/LoadCommandInterpreter.java | 3 +- .../examples/rdf/AddDataFromRdfModel.java | 13 ++- .../rulewerk/rdf/RdfModelConverter.java | 39 +++++-- .../rulewerk/rdf/RdfValueToTermConverter.java | 45 ++++++-- .../rdf/RdfValueToTermConverterTest.java | 104 ++++++++++++++++++ .../rdf/TestConvertRdfFileToFacts.java | 16 +-- .../rulewerk/rdf/TestReasonOverRdfFacts.java | 6 +- 7 files changed, 191 insertions(+), 35 deletions(-) create mode 100644 rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 3fc22b564..ec8a2472d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -165,7 +165,8 @@ private void loadRdf(Interpreter interpreter, String fileName) throws CommandExe throw new CommandExecutionException(message); } - interpreter.getKnowledgeBase().addStatements(RdfModelConverter.rdfModelToFacts(model)); + RdfModelConverter rdfModelConverter = new RdfModelConverter(true); + interpreter.getKnowledgeBase().addStatements(rdfModelConverter.rdfModelToFacts(model)); for (Namespace namespace : model.getNamespaces()) { try { interpreter.getKnowledgeBase().getPrefixDeclarationRegistry() diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java index 79298dd2c..8600edae6 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java +++ 
b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -65,6 +65,7 @@ public static void main(final String[] args) throws IOException, RDFParseException, RDFHandlerException, URISyntaxException { ExamplesUtils.configureLogging(); + RdfModelConverter rdfModelConverter = new RdfModelConverter(); /* * Local file containing metadata of publications from ISWC'16 conference, in @@ -77,10 +78,10 @@ public static void main(final String[] args) RDFFormat.RDFXML); /* - * Using rulewerk-rdf library, we convert RDF Model triples to facts, each having - * the ternary predicate "TRIPLE". + * Using rulewerk-rdf library, we convert RDF Model triples to facts, each + * having the ternary predicate "TRIPLE". */ - final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2016); + final Set tripleFactsISWC2016 = rdfModelConverter.rdfModelToFacts(rdfModelISWC2016); System.out.println("Example triple fact from iswc-2016 dataset:"); System.out.println(" - " + tripleFactsISWC2016.iterator().next()); @@ -96,10 +97,10 @@ public static void main(final String[] args) RDFFormat.TURTLE); /* - * Using rulewerk-rdf library, we convert RDF Model triples to facts, each having - * the ternary predicate "TRIPLE". + * Using rulewerk-rdf library, we convert RDF Model triples to facts, each + * having the ternary predicate "TRIPLE". */ - final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2017); + final Set tripleFactsISWC2017 = rdfModelConverter.rdfModelToFacts(rdfModelISWC2017); System.out.println("Example triple fact from iswc-2017 dataset:"); System.out.println(" - " + tripleFactsISWC2017.iterator().next()); diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index e5547b8dd..986d044b7 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -76,32 +76,55 @@ public final class RdfModelConverter { */ public static final Predicate RDF_TRIPLE_PREDICATE = Expressions.makePredicate(RDF_TRIPLE_PREDICATE_NAME, 3); - private RdfModelConverter() { + final RdfValueToTermConverter rdfValueToTermConverter; + + /** + * Construct an object that does not skolemize blank nodes. + */ + public RdfModelConverter() { + this(false); + } + + /** + * Constructor. + * + * @param skolemize if true, blank nodes are translated to constants with + * generated IRIs; otherwise they are replanced by named nulls + * with generated ids + */ + public RdfModelConverter(boolean skolemize) { + rdfValueToTermConverter = new RdfValueToTermConverter(skolemize); } /** * Converts each {@code } triple statement of the - * given {@code rdfModel} into a {@link PositiveLiteral} of the form + * given {@code rdfModel} into a {@link Fact} of the form * {@code TRIPLE(subject, predicate, object)}. See * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE}, the ternary predicate used * for all literals generated from RDF triples. * * @param rdfModel a {@link Model} of an RDF document, containing triple * statements that will be converter to facts. - * @return a set of literals corresponding to the statements of given + * @return a set of facts corresponding to the statements of given * {@code rdfModel}. 
*/ - public static Set rdfModelToFacts(final Model rdfModel) { - return rdfModel.stream().map(RdfModelConverter::rdfStatementToFact).collect(Collectors.toSet()); + public Set rdfModelToFacts(final Model rdfModel) { + return rdfModel.stream().map((statement) -> rdfStatementToFact(statement)).collect(Collectors.toSet()); } - static Fact rdfStatementToFact(final Statement statement) { + /** + * Converts an RDF statement (triple) to a Rulewerk {@link Fact}. + * + * @param statement + * @return + */ + Fact rdfStatementToFact(final Statement statement) { final Resource subject = statement.getSubject(); final URI predicate = statement.getPredicate(); final Value object = statement.getObject(); - return Expressions.makeFact(RDF_TRIPLE_PREDICATE, Arrays.asList(RdfValueToTermConverter.rdfValueToTerm(subject), - RdfValueToTermConverter.rdfValueToTerm(predicate), RdfValueToTermConverter.rdfValueToTerm(object))); + return Expressions.makeFact(RDF_TRIPLE_PREDICATE, Arrays.asList(rdfValueToTermConverter.convertValue(subject), + rdfValueToTermConverter.convertValue(predicate), rdfValueToTermConverter.convertValue(object))); } } diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java index 058228665..e12704c3c 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -29,39 +29,62 @@ import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +/** + * Helper class to convert RDF ters to Rulewerk {@link Term} objects. + * + * @author Markus Kroetzsch + * + */ final class RdfValueToTermConverter { - private RdfValueToTermConverter() { + final boolean skolemize; + final Skolemization skolemization = new Skolemization(); + + /** + * Constructor. + * + * @param skolemize if true, blank nodes are translated to constants with + * generated IRIs; otherwise they are replanced by named nulls + * with generated ids + */ + public RdfValueToTermConverter(boolean skolemize) { + this.skolemize = skolemize; } - static Term rdfValueToTerm(final Value value) { + public Term convertValue(final Value value) { if (value instanceof BNode) { - return rdfBlankNodeToBlank((BNode) value); + return convertBlankNode((BNode) value); } else if (value instanceof Literal) { - return rdfLiteralToConstant((Literal) value); + return convertLiteral((Literal) value); } else if (value instanceof URI) { - return rdfUriToConstant((URI) value); + return convertUri((URI) value); } else { throw new RulewerkRuntimeException("Unknown value type: " + value.getClass()); } } - static Term rdfBlankNodeToBlank(final BNode bNode) { - // IDs are generated to be unique in every Model. 
- return new NamedNullImpl(bNode.getID()); + public Term convertBlankNode(final BNode bNode) { + // Note: IDs are generated to be unique in every Model, so our renaming might be + // redundant. But we want a RenamedNamedNull here, and a consistent name format + // is nice too. + if (skolemize) { + return skolemization.getSkolemConstant(bNode.getID()); + } else { + return skolemization.getRenamedNamedNull(bNode.getID()); + } } - static Term rdfUriToConstant(final URI uri) { + public Term convertUri(final URI uri) { final String escapedURIString = NTriplesUtil.escapeString(uri.toString()); return new AbstractConstantImpl(escapedURIString); } - static Term rdfLiteralToConstant(final Literal literal) { + public Term convertLiteral(final Literal literal) { final URI datatype = literal.getDatatype(); if (datatype != null) { return new DatatypeConstantImpl(XMLDatatypeUtil.normalize(literal.getLabel(), datatype), diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java new file mode 100644 index 000000000..2bc4879f7 --- /dev/null +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java @@ -0,0 +1,104 @@ +package org.semanticweb.rulewerk.rdf; + +import static org.junit.Assert.*; + +import org.junit.Test; +import org.mockito.Mockito; +import org.openrdf.model.BNode; +import org.openrdf.model.Literal; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.openrdf.model.impl.BNodeImpl; +import org.openrdf.model.impl.LiteralImpl; +import org.openrdf.model.impl.URIImpl; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; + +public class RdfValueToTermConverterTest { + + @Test + public void convertUri_succeeds() { + URI uri = new URIImpl("http://example.org"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(uri); + + assertEquals(TermType.ABSTRACT_CONSTANT, term.getType()); + assertEquals("http://example.org", term.getName()); + } + + @Test + public void convertLiteralDatatype_succeeds() { + Literal literal = new LiteralImpl("42", new URIImpl("http://example.org/integer")); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(literal); + + assertEquals(TermType.DATATYPE_CONSTANT, term.getType()); + DatatypeConstant datataypeConstant = (DatatypeConstant) term; + assertEquals("http://example.org/integer", datataypeConstant.getDatatype()); + assertEquals("42", datataypeConstant.getLexicalValue()); + } + + @Test + public void convertLiteralLanguage_succeeds() { + Literal literal = new LiteralImpl("Test", "de"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(literal); + + assertEquals(TermType.LANGSTRING_CONSTANT, term.getType()); + LanguageStringConstant langStringConstant = (LanguageStringConstant) term; + assertEquals("Test", langStringConstant.getString()); + assertEquals("de", 
langStringConstant.getLanguageTag()); + } + + @Test + public void convertLiteralString_succeeds() { + Literal literal = new LiteralImpl("RDF 1.0 untyped"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(literal); + + assertEquals(TermType.DATATYPE_CONSTANT, term.getType()); + DatatypeConstant datataypeConstant = (DatatypeConstant) term; + assertEquals(PrefixDeclarationRegistry.XSD_STRING, datataypeConstant.getDatatype()); + assertEquals("RDF 1.0 untyped", datataypeConstant.getLexicalValue()); + } + + @Test + public void convertBNodeSkolemize_succeeds() { + BNode bnode = new BNodeImpl("myid"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(bnode); + + assertEquals(TermType.ABSTRACT_CONSTANT, term.getType()); + assertTrue(term.getName().startsWith(Skolemization.SKOLEM_IRI_PREFIX)); + } + + @Test + public void convertBNode_succeeds() { + BNode bnode = new BNodeImpl("myid"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(false); + Term term = converter.convertValue(bnode); + + assertEquals(TermType.NAMED_NULL, term.getType()); + assertNotEquals("myid", term.getName()); + } + + @Test(expected=RulewerkRuntimeException.class) + public void convertValueUnkownType_fails() { + Value value = Mockito.mock(Value.class); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + converter.convertValue(value); + } + +} diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java index 052ab1f5a..6957d5646 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java @@ -48,6 +48,8 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class TestConvertRdfFileToFacts { + + final RdfModelConverter rdfModelConverter = new RdfModelConverter(); // FIXME: The openrdf parser does neither support '\b' nor '\f' (from ASCII) and // encodes such characters as "\u0008" and "\u000C", respectively (the @@ -127,7 +129,7 @@ public class TestConvertRdfFileToFacts { public void testDataTypesNormalized() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils .parseFile(new File(RdfTestUtils.INPUT_FOLDER + "unnormalizedLiteralValues.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); assertEquals(expectedNormalizedFacts, facts); } @@ -135,7 +137,7 @@ public void testDataTypesNormalized() throws RDFHandlerException, RDFParseExcept public void testLiteralValuesPreserved() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "literalValues.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); assertEquals(expectedLiteralFacts, facts); } @@ -143,7 +145,7 @@ public void testLiteralValuesPreserved() throws RDFHandlerException, RDFParseExc public void testRelativeURIsMadeAbsolute() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "relativeURIs.ttl"), RDFFormat.TURTLE); - final Set 
facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); assertEquals(expectedRelativeUriFacts, facts); } @@ -151,7 +153,7 @@ public void testRelativeURIsMadeAbsolute() throws RDFHandlerException, RDFParseE public void testEscapedCharactersExpanded() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "escapedCharacters.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); assertEquals(expectedEscapedCharacterFacts, facts); } @@ -159,7 +161,7 @@ public void testEscapedCharactersExpanded() throws RDFHandlerException, RDFParse public void testLanguageTagsPreserved() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "languageTags.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); assertEquals(expectedLanguageTagFacts, facts); } @@ -167,7 +169,7 @@ public void testLanguageTagsPreserved() throws RDFHandlerException, RDFParseExce public void testCollectionsPreserved() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "collections.ttl"), RDFFormat.TURTLE); - final Set factsFromModel = RdfModelConverter.rdfModelToFacts(model); + final Set factsFromModel = rdfModelConverter.rdfModelToFacts(model); final Term blank1 = RdfTestUtils.getObjectOfFirstMatchedTriple(file2, fileA, factsFromModel); final Term blank2 = RdfTestUtils.getObjectOfFirstMatchedTriple(file3, fileA, factsFromModel); @@ -215,7 +217,7 @@ public void testBlankNodesWithSameLabelAreDifferentInDifferentModels() private Set getBlanksFromTurtleFile(final File file) throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(file, RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); final Set blanks = new HashSet<>(); facts.forEach(fact -> blanks.addAll(fact.getNamedNulls().collect(Collectors.toSet()))); diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java index 214d32d6e..ce4d9f3b8 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java @@ -47,6 +47,8 @@ import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; public class TestReasonOverRdfFacts { + + final RdfModelConverter rdfModelConverter = new RdfModelConverter(); private final Constant carlBenz = Expressions.makeAbstractConstant("https://example.org/Carl-Benz"); private final Constant invention = Expressions.makeAbstractConstant("https://example.org/invention"); @@ -64,7 +66,7 @@ public class TestReasonOverRdfFacts { public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "exampleFacts.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); final KnowledgeBase kb 
= new KnowledgeBase(); kb.addStatements(facts); @@ -83,7 +85,7 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "exampleFacts.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(facts); From 61e89a6c615952e79008e0be17c93c28f8aa7032 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 15:18:45 +0200 Subject: [PATCH 0759/1003] more features for RDF converter --- .../commands/LoadCommandInterpreter.java | 12 +-- .../rulewerk/rdf/RdfModelConverter.java | 60 +++++++++++++- .../src/test/data/input/test-turtle.ttl | 3 + ...a => IntegrationTestsConvertRdfFiles.java} | 2 +- ...grationTestsReasonOverRdfFactsinVLog.java} | 2 +- .../rulewerk/rdf/RdfModelConverterTest.java | 83 +++++++++++++++++++ 6 files changed, 145 insertions(+), 17 deletions(-) create mode 100644 rulewerk-rdf/src/test/data/input/test-turtle.ttl rename rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/{TestConvertRdfFileToFacts.java => IntegrationTestsConvertRdfFiles.java} (99%) rename rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/{TestReasonOverRdfFacts.java => IntegrationTestsReasonOverRdfFactsinVLog.java} (98%) create mode 100644 rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index ec8a2472d..835a506b9 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -31,7 +31,6 @@ import java.util.List; import org.openrdf.model.Model; -import org.openrdf.model.Namespace; import org.openrdf.model.impl.LinkedHashModel; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; @@ -43,7 +42,6 @@ import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; @@ -166,15 +164,7 @@ private void loadRdf(Interpreter interpreter, String fileName) throws CommandExe } RdfModelConverter rdfModelConverter = new RdfModelConverter(true); - interpreter.getKnowledgeBase().addStatements(rdfModelConverter.rdfModelToFacts(model)); - for (Namespace namespace : model.getNamespaces()) { - try { - interpreter.getKnowledgeBase().getPrefixDeclarationRegistry() - .setPrefixIri(namespace.getPrefix() + ":", namespace.getName()); - } catch (PrefixDeclarationException e) { - // ignore this prefix - } - } + rdfModelConverter.addAll(interpreter.getKnowledgeBase(), model); } catch (IOException e) { throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); } diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java 
b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index 986d044b7..fb14af98b 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -27,17 +27,22 @@ import org.openrdf.model.BNode; import org.openrdf.model.Literal; import org.openrdf.model.Model; +import org.openrdf.model.Namespace; import org.openrdf.model.Resource; import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.model.Value; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class for converting RDF {@link Model}s to {@link PositiveLiteral} sets. @@ -59,10 +64,13 @@ * * * @author Irina Dragoste + * @author Markus Kroetzsch * */ public final class RdfModelConverter { + private static Logger LOGGER = LoggerFactory.getLogger(RdfModelConverter.class); + /** * The name of the ternary predicate of literals generated from RDF triples: * "TRIPLE". @@ -103,13 +111,57 @@ public RdfModelConverter(boolean skolemize) { * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE}, the ternary predicate used * for all literals generated from RDF triples. * - * @param rdfModel a {@link Model} of an RDF document, containing triple - * statements that will be converter to facts. + * @param model a {@link Model} of an RDF document, containing triple statements + * that will be converter to facts. * @return a set of facts corresponding to the statements of given * {@code rdfModel}. */ - public Set rdfModelToFacts(final Model rdfModel) { - return rdfModel.stream().map((statement) -> rdfStatementToFact(statement)).collect(Collectors.toSet()); + public Set rdfModelToFacts(final Model model) { + return model.stream().map((statement) -> rdfStatementToFact(statement)).collect(Collectors.toSet()); + } + + /** + * Adds data and prefix declarations from a given RDF {@link Model} to a given + * {@link KnowledgeBase}. + * + * @param knowledgeBase the {@link KnowledgeBase} to add to + * @param model the {@link Model} with the RDF data + */ + public void addAll(KnowledgeBase knowledgeBase, Model model) { + addPrefixes(knowledgeBase, model); + addFacts(knowledgeBase, model); + } + + /** + * Adds the data from a given RDF {@link Model} as {@link Fact}s to the given + * {@link KnowledgeBase}. + * + * @param knowledgeBase the {@link KnowledgeBase} to add {@link Fact}s to + * @param model the {@link Model} with the RDF data + */ + public void addFacts(KnowledgeBase knowledgeBase, Model model) { + model.stream().forEach((statement) -> { + knowledgeBase.addStatement(rdfStatementToFact(statement)); + }); + } + + /** + * Adds the prefixes declared for a given RDF {@link Model} to the given + * {@link KnowledgeBase}. If a prefix cannot be added for some reason, it is + * ignored and a warning is logged. 
+ * + * @param knowledgeBase the {@link KnowledgeBase} to add prefix declarations to + * @param model the {@link Model} with the RDF data + */ + public void addPrefixes(KnowledgeBase knowledgeBase, Model model) { + for (Namespace namespace : model.getNamespaces()) { + try { + knowledgeBase.getPrefixDeclarationRegistry().setPrefixIri(namespace.getPrefix() + ":", + namespace.getName()); + } catch (PrefixDeclarationException e) { + LOGGER.warn("Failed to set prefix \"" + namespace.getPrefix() + "\" from RDF model: " + e.getMessage()); + } + } } /** diff --git a/rulewerk-rdf/src/test/data/input/test-turtle.ttl b/rulewerk-rdf/src/test/data/input/test-turtle.ttl new file mode 100644 index 000000000..3fbe612de --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/test-turtle.ttl @@ -0,0 +1,3 @@ +@prefix : . + +:a :b :c . diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsConvertRdfFiles.java similarity index 99% rename from rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java rename to rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsConvertRdfFiles.java index 6957d5646..823a1589f 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsConvertRdfFiles.java @@ -47,7 +47,7 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -public class TestConvertRdfFileToFacts { +public class IntegrationTestsConvertRdfFiles { final RdfModelConverter rdfModelConverter = new RdfModelConverter(); diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsReasonOverRdfFactsinVLog.java similarity index 98% rename from rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java rename to rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsReasonOverRdfFactsinVLog.java index ce4d9f3b8..4143669f4 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsReasonOverRdfFactsinVLog.java @@ -46,7 +46,7 @@ import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -public class TestReasonOverRdfFacts { +public class IntegrationTestsReasonOverRdfFactsinVLog { final RdfModelConverter rdfModelConverter = new RdfModelConverter(); diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java new file mode 100644 index 000000000..77b280d26 --- /dev/null +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java @@ -0,0 +1,83 @@ +package org.semanticweb.rulewerk.rdf; + +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import org.junit.Test; +import org.openrdf.model.Model; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; + +public class RdfModelConverterTest { + + @Test + public void addToKnowledgeBase_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + rdfModelConverter.addAll(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + assertEquals("http://example.org/", knowledgeBase.getPrefixIri(":")); + } + + @Test + public void getFactSet_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + Set expected = new HashSet(); + expected.add(fact); + + Set facts = rdfModelConverter.rdfModelToFacts(model); + + assertEquals(expected, facts); + } + +} From 6e702a9133a3a8bc9f02e9080d7ef404c3a0d2a3 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 15:23:21 +0200 Subject: [PATCH 0760/1003] mention RDF improvements --- RELEASE-NOTES.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index ec4e55973..d7fbee0a6 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -9,15 +9,18 @@ New features: * Significant speedup in iterating over query results * Support for using data from a Trident database, the recommended data source for large RDF graphs in VLog +* More features to control how 
Rulewerk imports RDF data using rulewerk-rdf module * New class `LiteralQueryResultPrinter` for pretty-printing query results Other improvements: * Improved serialization of knowledge bases (using namespaces) * Simple (non-IRI, namespace-less) predicate names can now include - and _ +* Nulls in input data (aka "blank nodes") are now properly skolemized for VLog * InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where two or more edges are the same. Breaking changes: +* The `RdfModelConverter` class from the rdf package is no longer static (and has more options) * The `Serializer` class in the core package has been replaced by a new implementation with a completely different interface. * The methods `getSerialization` that were present in most syntax objects have been removed. Use `toString()` instead for simple serializations, or invoke a custom Serializer. From be6b025dba338f2706ba296bacf394be7bb50970 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 15:30:47 +0200 Subject: [PATCH 0761/1003] support setting custom triple predicate name --- .../commands/LoadCommandInterpreter.java | 3 +- .../rulewerk/rdf/RdfModelConverter.java | 33 +++++++++---------- .../rulewerk/rdf/RdfModelConverterTest.java | 18 ++++++++++ .../rdf/RdfValueToTermConverterTest.java | 20 +++++++++++ 4 files changed, 56 insertions(+), 18 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 835a506b9..099e29fe4 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -163,7 +163,8 @@ private void loadRdf(Interpreter interpreter, String fileName) throws CommandExe throw new CommandExecutionException(message); } - RdfModelConverter rdfModelConverter = new RdfModelConverter(true); + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, + RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME); rdfModelConverter.addAll(interpreter.getKnowledgeBase(), model); } catch (IOException e) { throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index fb14af98b..fe61636a3 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -72,36 +72,35 @@ public final class RdfModelConverter { private static Logger LOGGER = LoggerFactory.getLogger(RdfModelConverter.class); /** - * The name of the ternary predicate of literals generated from RDF triples: - * "TRIPLE". + * The name of the ternary predicate of literals generated from RDF triples by + * default. */ public static final String RDF_TRIPLE_PREDICATE_NAME = "TRIPLE"; - /** - * The ternary predicate of literals generated from RDF triples. It has - * {@code name}({@link Predicate#getName()}) "TRIPLE" and - * {@code arity}({@link Predicate#getArity()}) 3. 
- */ - public static final Predicate RDF_TRIPLE_PREDICATE = Expressions.makePredicate(RDF_TRIPLE_PREDICATE_NAME, 3); - final RdfValueToTermConverter rdfValueToTermConverter; + final Predicate triplePredicate; /** - * Construct an object that does not skolemize blank nodes. + * Construct an object that does not skolemize blank nodes and that uses a + * ternary predicate named {@link RdfModelConverter#RDF_TRIPLE_PREDICATE_NAME} + * for storing triples. */ public RdfModelConverter() { - this(false); + this(false, RDF_TRIPLE_PREDICATE_NAME); } /** * Constructor. * - * @param skolemize if true, blank nodes are translated to constants with - * generated IRIs; otherwise they are replanced by named nulls - * with generated ids + * @param skolemize if true, blank nodes are translated to constants + * with generated IRIs; otherwise they are replanced + * by named nulls with generated ids + * @param triplePredicateName name of the ternary predicate that should be used + * to store RDF triples */ - public RdfModelConverter(boolean skolemize) { - rdfValueToTermConverter = new RdfValueToTermConverter(skolemize); + public RdfModelConverter(boolean skolemize, String triplePredicateName) { + this.rdfValueToTermConverter = new RdfValueToTermConverter(skolemize); + this.triplePredicate = Expressions.makePredicate(triplePredicateName, 3); } /** @@ -175,7 +174,7 @@ Fact rdfStatementToFact(final Statement statement) { final URI predicate = statement.getPredicate(); final Value object = statement.getObject(); - return Expressions.makeFact(RDF_TRIPLE_PREDICATE, Arrays.asList(rdfValueToTermConverter.convertValue(subject), + return Expressions.makeFact(triplePredicate, Arrays.asList(rdfValueToTermConverter.convertValue(subject), rdfValueToTermConverter.convertValue(predicate), rdfValueToTermConverter.convertValue(object))); } diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java index 77b280d26..6135416f4 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java @@ -80,4 +80,22 @@ public void getFactSet_succeeds() assertEquals(expected, facts); } + @Test + public void addFactsCustomTriplePredicate_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, "mytriple"); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("mytriple", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + } diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java index 2bc4879f7..16b40036f 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java +++ 
b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.rdf; +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.*; import org.junit.Test; From 2d1f93f1e67694586087eb6b6bf61074f5c1bb06 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 15:53:55 +0200 Subject: [PATCH 0762/1003] support custom RDF load predicate --- .../commands/LoadCommandInterpreter.java | 29 ++++++++++---- .../commands/LoadCommandInterpreterTest.java | 38 +++++++++++++++++++ 2 files changed, 59 insertions(+), 8 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 099e29fe4..bec4713dc 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -68,14 +68,26 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (command.getArguments().size() > 0 && command.getArguments().get(0).fromTerm().isPresent() && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { task = Interpreter.extractNameArgument(command, 0, "task"); - Interpreter.validateArgumentCount(command, 2); pos++; } else { task = TASK_RLS; - Interpreter.validateArgumentCount(command, 1); } String fileName = Interpreter.extractStringArgument(command, pos, "filename"); + pos++; + + String rdfTriplePredicate = RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; + if (TASK_RDF.equals(task) && command.getArguments().size() > pos) { + if (command.getArguments().get(pos).fromTerm().isPresent() + && command.getArguments().get(pos).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { + rdfTriplePredicate = command.getArguments().get(pos).fromTerm().get().getName(); + pos++; + } else { + throw new CommandExecutionException("Optional triple predicate name must be an IRI."); + } + } + + Interpreter.validateArgumentCount(command, pos); int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); @@ -85,7 +97,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } else if (TASK_OWL.equals(task)) { loadOwl(interpreter, fileName); } else if (TASK_RDF.equals(task)) { - loadRdf(interpreter, fileName); + loadRdf(interpreter, fileName, rdfTriplePredicate); } else { throw new CommandExecutionException( "Unknown task " + task + ". 
Should be one of " + TASK_RLS + ", " + TASK_OWL + ", " + TASK_RDF); @@ -137,7 +149,8 @@ private void loadOwl(Interpreter interpreter, String fileName) throws CommandExe interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); } - private void loadRdf(Interpreter interpreter, String fileName) throws CommandExecutionException { + private void loadRdf(Interpreter interpreter, String fileName, String triplePredicateName) + throws CommandExecutionException { try { String baseIri = new File(fileName).toURI().toString(); @@ -163,8 +176,7 @@ private void loadRdf(Interpreter interpreter, String fileName) throws CommandExe throw new CommandExecutionException(message); } - RdfModelConverter rdfModelConverter = new RdfModelConverter(true, - RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME); + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, triplePredicateName); rdfModelConverter.addAll(interpreter.getKnowledgeBase(), model); } catch (IOException e) { throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); @@ -182,12 +194,13 @@ private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, S @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " [TASK] \n" // + interpreter.printNormal("Usage: @" + commandName + " [TASK] [RDF predicate]\n" // + " file: path to the file to load\n" // + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // + " OWL to load an OWL ontology and convert it to facts and rules\n" // - + " RDF to load an RDF document and convert it to facts for predicate TRIPLE[3]\n"); + + " RDF to load an RDF document and convert it to facts\n" + + " RDF predicate: optional name of the predicate used for loading RDF triples (default: TRIPLE)\n"); } @Override diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index ded18aa69..7eae820a5 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -185,6 +185,26 @@ public void correctUseWithRdfTask_Nt_succeeds() assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } + + @Test + public void correctUseWithRdfTask_NtCustomPredicate_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("http://example.org/mytriple", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.nt' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + 
assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } @Test public void correctUseWithRdfTask_Turtle_succeeds() @@ -256,6 +276,24 @@ public void wrongArgumentCount_fails() throws ParsingException, CommandExecution Command command = interpreter.parseCommand("@load ."); interpreter.runCommand(command); } + + @Test(expected = CommandExecutionException.class) + public void wrongRdfPredicateTermType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load RDF \"file.nt\" \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongRdfPredicateArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load RDF \"file.nt\" p(a) ."); + interpreter.runCommand(command); + } @Test(expected = CommandExecutionException.class) public void wrongArgumentType_fails() throws ParsingException, CommandExecutionException { From 0a4d23fcdc8c7e5f68f6f52f3b010265d6c9301a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 16:04:46 +0200 Subject: [PATCH 0763/1003] do not generate new prefix when setting same namespace twice --- .../MergingPrefixDeclarationRegistry.java | 11 ++++++++--- .../model/MergingPrefixDeclarationRegistryTest.java | 11 +++++++++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index 5dade8819..3413f19c4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -82,15 +82,20 @@ public void setBaseIri(String baseIri) { } /** - * Registers a prefix declaration. If prefixName is already registered, a - * freshly generated name will be used instead. + * Registers a prefix declaration. If prefixName is already registered for + * another IRI, a freshly generated name will be used instead. * * @param prefixName the name of the prefix. * @param prefixIri the IRI of the prefix. */ @Override public void setPrefixIri(String prefixName, String prefixIri) { - String name = prefixes.containsKey(prefixName) ? 
getFreshPrefix() : prefixName; + String name; + if (prefixes.containsKey(prefixName) && !prefixIri.equals(prefixes.get(prefixName))) { + name = getFreshPrefix(); + } else { + name = prefixName; + } prefixes.put(name, prefixIri); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 6705ba25d..cc46e3035 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -23,6 +23,8 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; +import java.util.stream.StreamSupport; + import org.junit.Before; import org.junit.Test; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; @@ -100,6 +102,15 @@ public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationExce prefixDeclarations.setPrefixIri("eg:", BASE); prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); assertEquals(BASE, prefixDeclarations.getPrefixIri("eg:")); + assertEquals(2, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); + } + + @Test + public void setPrefixIri_setSamePrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", BASE); + assertEquals(BASE, prefixDeclarations.getPrefixIri("eg:")); + assertEquals(1, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); } @Test From b1c9516bb2f4a3a24a1a94686780db7f34ec9117 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 16:48:29 +0200 Subject: [PATCH 0764/1003] support ABox style RDF loading --- .../model/api/PrefixDeclarationRegistry.java | 3 ++ .../rulewerk/rdf/RdfModelConverter.java | 39 ++++++++++---- .../rulewerk/rdf/RdfValueToTermConverter.java | 7 +++ .../data/input/test-turtle-type-weird.ttl | 4 ++ .../src/test/data/input/test-turtle-type.ttl | 4 ++ .../rulewerk/rdf/RdfModelConverterTest.java | 52 +++++++++++++++++++ .../rdf/RdfValueToTermConverterTest.java | 14 ++++- 7 files changed, 112 insertions(+), 11 deletions(-) create mode 100644 rulewerk-rdf/src/test/data/input/test-turtle-type-weird.ttl create mode 100644 rulewerk-rdf/src/test/data/input/test-turtle-type.ttl diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index fd82ade16..37d62280e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -44,6 +44,9 @@ public interface PrefixDeclarationRegistry extends Iterable . +@prefix rdf: . + +:a rdf:type "test"@de . diff --git a/rulewerk-rdf/src/test/data/input/test-turtle-type.ttl b/rulewerk-rdf/src/test/data/input/test-turtle-type.ttl new file mode 100644 index 000000000..53844257a --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/test-turtle-type.ttl @@ -0,0 +1,4 @@ +@prefix : . +@prefix rdf: . + +:a rdf:type :c . 
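Taken together, the converter changes exercised by the tests below allow three RDF import styles: plain triples over the default TRIPLE predicate, triples over a custom predicate with skolemized blank nodes, and "ABox style" import (no triple predicate), where rdf:type triples with IRI objects become unary facts and other triples become binary facts. The following is a minimal sketch of how these variants might be called, assuming an already-parsed openrdf Model and an existing KnowledgeBase; it is not part of any patch, and the class and method names are illustrative only.

import java.util.Set;

import org.openrdf.model.Model;
import org.semanticweb.rulewerk.core.model.api.Fact;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.rdf.RdfModelConverter;

public class RdfImportSketch {

	// Illustrative sketch only: shows the converter configurations introduced in
	// these patches; "model" is assumed to have been parsed elsewhere (e.g. with Rio).
	static void importRdf(final Model model, final KnowledgeBase knowledgeBase) {
		// Default: TRIPLE(s, p, o) facts, blank nodes become named nulls
		final RdfModelConverter defaultConverter = new RdfModelConverter();
		final Set<Fact> tripleFacts = defaultConverter.rdfModelToFacts(model);
		System.out.println("Converted " + tripleFacts.size() + " triples.");

		// Custom triple predicate name, blank nodes skolemized to constants
		final RdfModelConverter customConverter = new RdfModelConverter(true, "mytriple");
		customConverter.addFacts(knowledgeBase, model);

		// ABox style (null triple predicate): rdf:type triples with IRI objects
		// become unary facts, other triples binary facts; addAll also copies the
		// model's prefix declarations into the knowledge base
		final RdfModelConverter aboxConverter = new RdfModelConverter(true, null);
		aboxConverter.addAll(knowledgeBase, model);
	}
}

In the client shell, the ABOX keyword of the @load RDF command (introduced a few patches further down) selects this last, null-predicate configuration.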
diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java index 6135416f4..0ab610723 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java @@ -36,6 +36,7 @@ import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; @@ -98,4 +99,55 @@ public void addFactsCustomTriplePredicate_succeeds() assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); } + @Test + public void addFactsNoTriplePredicate_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, null); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("http://example.org/b", 2); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termc); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + + @Test + public void addFactsNoTriplePredicateType_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, null); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle-type.ttl"), + RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("http://example.org/c", 1); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Fact fact = Expressions.makeFact(predicate, terma); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + + @Test + public void addFactsNoTriplePredicateTypeWeird_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, null); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle-type-weird.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate(PrefixDeclarationRegistry.RDF_TYPE, 2); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termc = Expressions.makeLanguageStringConstant("test", "de"); + Fact fact = Expressions.makeFact(predicate, terma, termc); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + } diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java 
b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java index 16b40036f..29dab9abf 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java @@ -34,6 +34,7 @@ import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.TermType; @@ -52,6 +53,17 @@ public void convertUri_succeeds() { assertEquals("http://example.org", term.getName()); } + @Test + public void convertUriToPredicate_succeeds() { + URI uri = new URIImpl("http://example.org/mypred"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Predicate predicate = converter.convertUriToPredicate(uri, 2); + + assertEquals("http://example.org/mypred", predicate.getName()); + assertEquals(2, predicate.getArity()); + } + @Test public void convertLiteralDatatype_succeeds() { Literal literal = new LiteralImpl("42", new URIImpl("http://example.org/integer")); @@ -113,7 +125,7 @@ public void convertBNode_succeeds() { assertNotEquals("myid", term.getName()); } - @Test(expected=RulewerkRuntimeException.class) + @Test(expected = RulewerkRuntimeException.class) public void convertValueUnkownType_fails() { Value value = Mockito.mock(Value.class); From 285e53516775f1f0a8ac8febf53a1d53b225374a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 16:58:31 +0200 Subject: [PATCH 0765/1003] support ABox style RDF import --- .../commands/LoadCommandInterpreter.java | 5 ++++ .../commands/LoadCommandInterpreterTest.java | 28 ++++++++++++++++--- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index bec4713dc..63faf984f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -61,6 +61,8 @@ public class LoadCommandInterpreter implements CommandInterpreter { static final String TASK_OWL = "OWL"; static final String TASK_RDF = "RDF"; + static final String PREDICATE_ABOX = "ABOX"; + @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { String task; @@ -81,6 +83,9 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (command.getArguments().get(pos).fromTerm().isPresent() && command.getArguments().get(pos).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { rdfTriplePredicate = command.getArguments().get(pos).fromTerm().get().getName(); + if (PREDICATE_ABOX.equals(rdfTriplePredicate)) { // ABox-style import + rdfTriplePredicate = null; + } pos++; } else { throw new CommandExecutionException("Optional triple predicate name must be an IRI."); diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java 
b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index 7eae820a5..ae4d121a2 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -185,7 +185,7 @@ public void correctUseWithRdfTask_Nt_succeeds() assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } - + @Test public void correctUseWithRdfTask_NtCustomPredicate_succeeds() throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { @@ -198,7 +198,27 @@ public void correctUseWithRdfTask_NtCustomPredicate_succeeds() Term termc = Expressions.makeAbstractConstant("http://example.org/c"); Fact fact = Expressions.makeFact(predicate, terma, termb, termc); - Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.nt' ."); + Command command = interpreter + .parseCommand("@load RDF 'src/test/data/loadtest.nt' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithRdfTask_NtABoxLoading_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("http://example.org/b", 2); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.nt' ABOX."); interpreter.runCommand(command); assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); @@ -276,7 +296,7 @@ public void wrongArgumentCount_fails() throws ParsingException, CommandExecution Command command = interpreter.parseCommand("@load ."); interpreter.runCommand(command); } - + @Test(expected = CommandExecutionException.class) public void wrongRdfPredicateTermType_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); @@ -285,7 +305,7 @@ public void wrongRdfPredicateTermType_fails() throws ParsingException, CommandEx Command command = interpreter.parseCommand("@load RDF \"file.nt\" \"string\" ."); interpreter.runCommand(command); } - + @Test(expected = CommandExecutionException.class) public void wrongRdfPredicateArgumentType_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); From e1b7cdb5e59e97eb04f1fdce0895a13baf15d925 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 17:05:21 +0200 Subject: [PATCH 0766/1003] updated help --- .../rulewerk/commands/LoadCommandInterpreter.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 63faf984f..a5dd3196c 100644 --- 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -200,12 +200,14 @@ private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, S @Override public void printHelp(String commandName, Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " [TASK] [RDF predicate]\n" // - + " file: path to the file to load\n" // + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // + " OWL to load an OWL ontology and convert it to facts and rules\n" // - + " RDF to load an RDF document and convert it to facts\n" - + " RDF predicate: optional name of the predicate used for loading RDF triples (default: TRIPLE)\n"); + + " RDF to load an RDF document and convert it to facts\n" // + + " file: path to the file to load\n" // + + " RDF predicate: optional name of the predicate used for loading RDF\n" // + + " triples (default: TRIPLE); use ABOX to load triples\n" // + + " like OWL assertions, using unary and binary predicates\n"); } @Override From 80c9eb25ea333ae728d044256ecf1b134a9c8e89 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 26 Aug 2020 17:09:03 +0200 Subject: [PATCH 0767/1003] fix add exit command to completer --- .../client/shell/InteractiveShell.java | 6 +- .../rulewerk/client/shell/Shell.java | 49 +++++++----- .../rulewerk/client/shell/ShellTest.java | 75 +++++++++++++++++++ 3 files changed, 108 insertions(+), 22 deletions(-) create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 478299580..e07c0fc96 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -48,14 +48,12 @@ public static void run() throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); final Interpreter interpreter = initializeInterpreter(terminal); - + final Shell shell = new Shell(interpreter); final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); final AttributedString promptProvider = DefaultConfiguration.buildPromptProvider(); - final Shell shell = new Shell(lineReader, promptProvider, interpreter); - shell.run(); - + shell.run(lineReader, promptProvider); } static Interpreter initializeInterpreter(final Terminal terminal) { diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 7d67734d7..98c0a3345 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -3,6 +3,7 @@ import org.jline.reader.EndOfFileException; import org.jline.reader.LineReader; import org.jline.reader.UserInterruptException; +import org.jline.terminal.Terminal; import org.jline.utils.AttributedString; /*- @@ -36,30 +37,30 @@ public class Shell { private final Interpreter interpreter; - private final LineReader lineReader; - private final AttributedString prompt; boolean running; - public Shell(final 
LineReader lineReader, final AttributedString prompt, final Interpreter interpreter) { - this.lineReader = lineReader; - this.prompt = prompt; + public Shell(final Interpreter interpreter) { this.interpreter = interpreter; + this.registerExitCommand(); + } + + private void registerExitCommand() { final CommandInterpreter exitCommandInterpreter = new ExitCommandInterpreter(this); for (final ExitCommandName exitCommandName : ExitCommandName.values()) { - interpreter.registerCommandInterpreter(exitCommandName.toString(), exitCommandInterpreter); + this.interpreter.registerCommandInterpreter(exitCommandName.toString(), exitCommandInterpreter); } } - public void run() { + public void run(final LineReader lineReader, final AttributedString prompt) { this.printWelcome(); this.running = true; while (this.running) { final Command command; try { - command = this.readCommand(); + command = this.readCommand(lineReader, prompt); } catch (final Exception e) { this.interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); e.printStackTrace(); @@ -85,10 +86,10 @@ public void run() { * * @return command or null */ - public Command readCommand() { + public Command readCommand(final LineReader lineReader, final AttributedString prompt) { String readLine; try { - readLine = this.lineReader.readLine(this.prompt.toAnsi(this.lineReader.getTerminal())); + readLine = this.readLine(lineReader, prompt); } catch (final UserInterruptException e) { if (e.getPartialLine().isEmpty()) { // Exit request from user CTRL+C @@ -102,16 +103,10 @@ public Command readCommand() { } - readLine = readLine.trim(); - if ("".equals(readLine)) { + readLine = this.processReadLine(readLine); + if (readLine.isEmpty()) { return null; } - if (readLine.charAt(0) != '@') { - readLine = "@" + readLine; - } - if (readLine.charAt(readLine.length() - 1) != '.') { - readLine = readLine + " ."; - } try { return this.interpreter.parseCommand(readLine); @@ -121,6 +116,24 @@ public Command readCommand() { } } + private String readLine(final LineReader lineReader, final AttributedString prompt) { + final Terminal terminal = lineReader.getTerminal(); + return lineReader.readLine(prompt.toAnsi(terminal)); + } + + String processReadLine(final String readLine) { + String result = readLine.trim(); + if (!result.isEmpty()) { + if (readLine.charAt(0) != '@') { + result = "@" + readLine; + } + if (readLine.charAt(readLine.length() - 1) != '.') { + result = readLine + " ."; + } + } + return result; + } + public void exitShell() { this.running = false; } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java new file mode 100644 index 000000000..321cc32a8 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -0,0 +1,75 @@ +//package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +// +//import static org.junit.Assert.assertNull; +// +//import java.io.PrintWriter; +// +//import org.jline.reader.LineReader; +//import org.jline.terminal.Terminal; +//import org.jline.utils.AttributedString; +//import org.junit.Test; +//import org.mockito.Mockito; +//import org.semanticweb.rulewerk.commands.Interpreter; +//import org.semanticweb.rulewerk.core.model.api.Command; +//import org.semanticweb.rulewerk.core.reasoner.Reasoner; +//import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +//import org.semanticweb.rulewerk.parser.ParserConfiguration; +// +//public class ShellTest { +// +// @Test +// public void testProcessLineEmpty() { +// final Terminal terminalMock = Mockito.mock(Terminal.class); +// final Interpreter interpreter = getMockInterpreter(terminalMock); +// DefaultConfiguration.buildLineReader(terminalMock, interpreter); +// final LineReader lineReaderMock = Mockito.mock(LineReader.class); +// final AttributedString prompt = Mockito.mock(AttributedString.class); +// final Shell shell = new Shell(lineReaderMock, prompt, interpreter); +// +// Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(""); +// +// final Command readCommand = shell.readCommand(); +// assertNull(readCommand); +// } +// +// +// static public Interpreter getMockInterpreter(final Terminal terminal) { +// final Reasoner reasonerMock = Mockito.mock(Reasoner.class); +// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); +// +// final Interpreter interpreter = new Interpreter(reasonerMock, new TerminalStyledPrinter(terminal), +// parserConfiguration); +// +// final PrintWriter printWriter = Mockito.mock(PrintWriter.class); +// Mockito.when(terminal.writer()).thenReturn(printWriter); +//// +//// // final TerminalStyledPrinter printer = new TerminalStyledPrinter(writer); +//// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); +//// final KnowledgeBase knowledgeBase = new KnowledgeBase(); +// +//// Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); +//// return new Interpreter(reasoner, printer, parserConfiguration); +// return interpreter; +// } +// +//} From 206c6a2f38ac32d1b1c8a060ac84c2077f5a082b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 26 Aug 2020 22:58:28 +0200 Subject: [PATCH 0768/1003] create prompt only once --- .../rulewerk/client/picocli/Main.java | 2 +- .../client/shell/DefaultConfiguration.java | 14 ++-- .../client/shell/InteractiveShell.java | 15 ++--- .../rulewerk/client/shell/Shell.java | 23 +++---- .../rulewerk/client/shell/ShellTest.java | 49 +++++++------- .../commands/ExitCommandInterpreterTest.java | 66 +++++++++++++++++++ 6 files changed, 114 insertions(+), 55 deletions(-) create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index 15a0d259f..409bd3a5b 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -46,7 +46,7 @@ public static void main(final String[] args) throws IOException { configureLogging(); if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { - 
InteractiveShell.run(); + new InteractiveShell().run(); } else { if (args[0].equals("materialize")) { final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index c53ad76be..76d8b7c3e 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -43,11 +43,6 @@ public final class DefaultConfiguration { private DefaultConfiguration() { } - public static AttributedString buildPromptProvider() { - return new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); - } - - public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) .appName("Rulewerk Shell").completer(buildCompleter(interpreter)) @@ -85,4 +80,13 @@ public static Terminal buildTerminal() throws IOException { return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); } + public static String buildPrompt(final Terminal terminal) { + return buildPromptProvider().toAnsi(terminal); + } + + public static AttributedString buildPromptProvider() { + final AttributedStyle promptStyle = AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW); + return new AttributedString("rulewerk>", promptStyle); + } + } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index e07c0fc96..88ca015bf 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -24,7 +24,6 @@ import org.jline.reader.LineReader; import org.jline.terminal.Terminal; -import org.jline.utils.AttributedString; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -39,24 +38,20 @@ public class InteractiveShell //implements Runnable { - public static void main(final String[] args) throws IOException { - run(); - } - // @Override - public static void run() throws IOException { + public void run() throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); - final Interpreter interpreter = initializeInterpreter(terminal); + final Interpreter interpreter = this.initializeInterpreter(terminal); final Shell shell = new Shell(interpreter); final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); - final AttributedString promptProvider = DefaultConfiguration.buildPromptProvider(); + final String prompt = DefaultConfiguration.buildPrompt(terminal); - shell.run(lineReader, promptProvider); + shell.run(lineReader, prompt); } - static Interpreter initializeInterpreter(final Terminal terminal) { + Interpreter initializeInterpreter(final Terminal terminal) { final KnowledgeBase knowledgeBase = new KnowledgeBase(); final Reasoner reasoner = new VLogReasoner(knowledgeBase); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); diff --git 
a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 98c0a3345..4cc99d7ad 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -3,8 +3,6 @@ import org.jline.reader.EndOfFileException; import org.jline.reader.LineReader; import org.jline.reader.UserInterruptException; -import org.jline.terminal.Terminal; -import org.jline.utils.AttributedString; /*- * #%L @@ -53,7 +51,7 @@ private void registerExitCommand() { } } - public void run(final LineReader lineReader, final AttributedString prompt) { + public void run(final LineReader lineReader, final String prompt) { this.printWelcome(); this.running = true; @@ -84,12 +82,14 @@ public void run(final LineReader lineReader, final AttributedString prompt) { * made to interpret mistyped commands by adding @ and . before and after the * input, if forgotten. * + * @param prompt + * * @return command or null */ - public Command readCommand(final LineReader lineReader, final AttributedString prompt) { + public Command readCommand(final LineReader lineReader, final String prompt) { String readLine; try { - readLine = this.readLine(lineReader, prompt); + readLine = lineReader.readLine(prompt); } catch (final UserInterruptException e) { if (e.getPartialLine().isEmpty()) { // Exit request from user CTRL+C @@ -116,19 +116,14 @@ public Command readCommand(final LineReader lineReader, final AttributedString p } } - private String readLine(final LineReader lineReader, final AttributedString prompt) { - final Terminal terminal = lineReader.getTerminal(); - return lineReader.readLine(prompt.toAnsi(terminal)); - } - String processReadLine(final String readLine) { String result = readLine.trim(); if (!result.isEmpty()) { - if (readLine.charAt(0) != '@') { - result = "@" + readLine; + if (result.charAt(0) != '@') { + result = "@" + result; } - if (readLine.charAt(readLine.length() - 1) != '.') { - result = readLine + " ."; + if (result.charAt(result.length() - 1) != '.') { + result = result + " ."; } } return result; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 321cc32a8..9cd2355de 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -1,24 +1,24 @@ //package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
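The `processReadLine` normalisation introduced just above trims the input, prefixes `@`, and appends ` .` when missing, so shorthand such as `help` becomes a parseable command. A minimal sketch of the expected behaviour, written in the style of the tests in this patch (the class name is illustrative, and the mocked Interpreter follows the pattern already used in `ShellTest`):

```
package org.semanticweb.rulewerk.client.shell;

import static org.junit.Assert.assertEquals;

import org.junit.Test;
import org.mockito.Mockito;
import org.semanticweb.rulewerk.commands.Interpreter;

// Illustrative only; mirrors the normalisation implemented in Shell.processReadLine above.
public class ProcessReadLineSketch {

	@Test
	public void normalizesShorthandInput() {
		// The mocked Interpreter only absorbs the exit-command registration done in the constructor.
		final Shell shell = new Shell(Mockito.mock(Interpreter.class));
		// A missing "@" prefix and "." terminator are added:
		assertEquals("@help .", shell.processReadLine("help"));
		// Well-formed input is only trimmed:
		assertEquals("@query p(?X) .", shell.processReadLine("  @query p(?X) . "));
		// Blank input stays empty, so readCommand returns no command.
	}
}
```
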
- * #L% - */ +// +///*- +// * #%L +// * Rulewerk Client +// * %% +// * Copyright (C) 2018 - 2020 Rulewerk Developers +// * %% +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// * #L% +// */ // //import static org.junit.Assert.assertNull; // @@ -43,16 +43,15 @@ // final Interpreter interpreter = getMockInterpreter(terminalMock); // DefaultConfiguration.buildLineReader(terminalMock, interpreter); // final LineReader lineReaderMock = Mockito.mock(LineReader.class); -// final AttributedString prompt = Mockito.mock(AttributedString.class); -// final Shell shell = new Shell(lineReaderMock, prompt, interpreter); +// final AttributedString prompt = Mockito.mock(AttributedString.class); +// final Shell shell = new Shell(interpreter); // -// Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(""); +// Mockito.when(lineReaderMock.readLine(Mockito.anyString())).thenReturn(""); // -// final Command readCommand = shell.readCommand(); +// final Command readCommand = shell.readCommand(lineReaderMock, prompt); // assertNull(readCommand); // } // -// // static public Interpreter getMockInterpreter(final Terminal terminal) { // final Reasoner reasonerMock = Mockito.mock(Reasoner.class); // final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java new file mode 100644 index 000000000..3590b52d3 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -0,0 +1,66 @@ +//package org.semanticweb.rulewerk.client.shell.commands; +// +///*- +// * #%L +// * Rulewerk Client +// * %% +// * Copyright (C) 2018 - 2020 Rulewerk Developers +// * %% +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. 
+// * #L% +// */ +// +//import static org.junit.Assert.assertTrue; +// +//import java.io.StringWriter; +// +//import org.junit.Test; +//import org.mockito.Mockito; +//import org.semanticweb.rulewerk.client.shell.Shell; +//import org.semanticweb.rulewerk.commands.CommandExecutionException; +//import org.semanticweb.rulewerk.commands.CommandInterpreter; +//import org.semanticweb.rulewerk.commands.Interpreter; +//import org.semanticweb.rulewerk.commands.InterpreterTest; +//import org.semanticweb.rulewerk.parser.ParsingException; +// +//public class ExitCommandInterpreterTest { +// +// @Test +// public void help_succeeds() throws ParsingException, CommandExecutionException { +// final StringWriter writer = new StringWriter(); +// final Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); +//// final CommandInterpreter commandInterpreter = new AddSourceCommandInterpreter(); +//// InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); +// +// final Shell shellMock = Mockito.mock(Shell.class); +// final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); +// commandInterpreter.printHelp("commandname", interpreter); +// +// commandInterpreter.printHelp("commandname", interpreter); +// final String result = writer.toString(); +// +// assertTrue(result.startsWith("Usage: @commandname ")); +// assertTrue(result.endsWith("\n")); +// +//// InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); +// } +// +// @Test +// public void synopsis_succeeds() throws ParsingException, CommandExecutionException { +// final Shell shellMock = Mockito.mock(Shell.class); +// final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); +// final String synopsis = commandInterpreter.getSynopsis(); +// assertTrue(synopsis.length() < 70); +// } +// +//} From 068a985db8d379798085625bdd472772426f3409 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Aug 2020 00:07:01 +0200 Subject: [PATCH 0769/1003] some unit tests Shell readCommand and ExitCommandInterpreter --- .../rulewerk/client/shell/Shell.java | 1 - .../commands/ExitCommandInterpreter.java | 13 +- .../rulewerk/client/shell/ShellTest.java | 138 +++++++++------ .../commands/ExitCommandInterpreterTest.java | 162 +++++++++++------- 4 files changed, 182 insertions(+), 132 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 4cc99d7ad..7f56b3f22 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -100,7 +100,6 @@ public Command readCommand(final LineReader lineReader, final String prompt) { } catch (final EndOfFileException e) { // Exit request from user CTRL+D return ExitCommandInterpreter.EXIT_COMMAND; - } readLine = this.processReadLine(readLine); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index ea2645279..85f4edb6d 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -34,25 +34,16 @@ public class ExitCommandInterpreter implements 
CommandInterpreter { public static enum ExitCommandName { exit; - - public static boolean isExitCommand(final String commandName) { - for (final ExitCommandName name : values()) { - if (name.toString().equals(commandName)) { - return true; - } - } - return false; - } } final Shell shell; - public ExitCommandInterpreter(Shell shell) { + public ExitCommandInterpreter(final Shell shell) { this.shell = shell; } @Override - public void printHelp(final String commandName, Interpreter interpreter) { + public void printHelp(final String commandName, final Interpreter interpreter) { interpreter.printNormal("Usage: " + commandName + ".\n"); } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 9cd2355de..bb3e8a02e 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -1,56 +1,84 @@ -//package org.semanticweb.rulewerk.client.shell; -// -///*- -// * #%L -// * Rulewerk Client -// * %% -// * Copyright (C) 2018 - 2020 Rulewerk Developers -// * %% -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * http://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. 
-// * #L% -// */ -// -//import static org.junit.Assert.assertNull; -// -//import java.io.PrintWriter; -// -//import org.jline.reader.LineReader; -//import org.jline.terminal.Terminal; -//import org.jline.utils.AttributedString; -//import org.junit.Test; -//import org.mockito.Mockito; -//import org.semanticweb.rulewerk.commands.Interpreter; -//import org.semanticweb.rulewerk.core.model.api.Command; -//import org.semanticweb.rulewerk.core.reasoner.Reasoner; -//import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -//import org.semanticweb.rulewerk.parser.ParserConfiguration; -// -//public class ShellTest { -// -// @Test -// public void testProcessLineEmpty() { -// final Terminal terminalMock = Mockito.mock(Terminal.class); -// final Interpreter interpreter = getMockInterpreter(terminalMock); -// DefaultConfiguration.buildLineReader(terminalMock, interpreter); -// final LineReader lineReaderMock = Mockito.mock(LineReader.class); -// final AttributedString prompt = Mockito.mock(AttributedString.class); -// final Shell shell = new Shell(interpreter); -// -// Mockito.when(lineReaderMock.readLine(Mockito.anyString())).thenReturn(""); -// -// final Command readCommand = shell.readCommand(lineReaderMock, prompt); -// assertNull(readCommand); -// } +package org.semanticweb.rulewerk.client.shell; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.jline.reader.LineReader; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; + + +public class ShellTest { + + @Test + public void processReadLine_Blank() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" "); + assertEquals("", processedReadLine); + } + + @Test + public void processReadLine_StartsWithAt() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @ "); + assertEquals("@ .", processedReadLine); + } + + @Test + public void processReadLine_EndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" . "); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_StartsWithAtEndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @. 
"); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_DoesNotStartWithAt_DoesNotEndWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" .@ "); + assertEquals("@.@ .", processedReadLine); + } + + @Test + public void readCommand_Blank() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(" "); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertNull(command); + + // TODO test interpreter.parseCommand was not called + // TODO test exceptions have not been thrown + } + + @Test + public void readCommand_Invalid() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("invalid"); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertNull(command); + + // TODO test interpreter.parseCommand was called + // TODO test Parsing exception has been thrown + } + // // static public Interpreter getMockInterpreter(final Terminal terminal) { // final Reasoner reasonerMock = Mockito.mock(Reasoner.class); @@ -70,5 +98,5 @@ //// return new Interpreter(reasoner, printer, parserConfiguration); // return interpreter; // } -// -//} + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java index 3590b52d3..b442817b8 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -1,66 +1,98 @@ -//package org.semanticweb.rulewerk.client.shell.commands; -// -///*- -// * #%L -// * Rulewerk Client -// * %% -// * Copyright (C) 2018 - 2020 Rulewerk Developers -// * %% -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * http://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. 
-// * #L% -// */ -// -//import static org.junit.Assert.assertTrue; -// -//import java.io.StringWriter; -// -//import org.junit.Test; -//import org.mockito.Mockito; -//import org.semanticweb.rulewerk.client.shell.Shell; -//import org.semanticweb.rulewerk.commands.CommandExecutionException; -//import org.semanticweb.rulewerk.commands.CommandInterpreter; -//import org.semanticweb.rulewerk.commands.Interpreter; -//import org.semanticweb.rulewerk.commands.InterpreterTest; -//import org.semanticweb.rulewerk.parser.ParsingException; -// -//public class ExitCommandInterpreterTest { -// -// @Test -// public void help_succeeds() throws ParsingException, CommandExecutionException { -// final StringWriter writer = new StringWriter(); -// final Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); -//// final CommandInterpreter commandInterpreter = new AddSourceCommandInterpreter(); -//// InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); -// -// final Shell shellMock = Mockito.mock(Shell.class); -// final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); -// commandInterpreter.printHelp("commandname", interpreter); -// -// commandInterpreter.printHelp("commandname", interpreter); -// final String result = writer.toString(); -// -// assertTrue(result.startsWith("Usage: @commandname ")); -// assertTrue(result.endsWith("\n")); -// -//// InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); -// } -// -// @Test -// public void synopsis_succeeds() throws ParsingException, CommandExecutionException { -// final Shell shellMock = Mockito.mock(Shell.class); -// final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); -// final String synopsis = commandInterpreter.getSynopsis(); -// assertTrue(synopsis.length() < 70); -// } -// +package org.semanticweb.rulewerk.client.shell.commands; + +import static org.junit.Assert.assertEquals; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertTrue; + +import java.io.StringWriter; +import java.io.Writer; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.client.shell.Shell; +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.CommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.commands.SimpleStyledPrinter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ExitCommandInterpreterTest { + + @Test + public void exitShell_succeeds() throws CommandExecutionException { + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + final Shell shellSpy = Mockito.spy(shell); + final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellSpy); + + commandInterpreter.run(Mockito.mock(Command.class), interpreterMock); + + Mockito.verify(shellSpy).exitShell(); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + final Shell shellMock = Mockito.mock(Shell.class); + final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); + final StringWriter writer = new StringWriter(); + final Interpreter interpreter = getMockInterpreter(writer); + + final Interpreter interpreterSpy = Mockito.spy(interpreter); + commandInterpreter.printHelp("commandname", interpreterSpy); + + Mockito.verify(interpreterSpy).printNormal("Usage: commandname.\n"); + + final String result = writer.toString(); + assertEquals("Usage: commandname.\n", result); + + // TODO what about testing printing to terminal? 
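The test above exercises `ExitCommandInterpreter` through the generic `CommandInterpreter` contract; the same registration mechanism works for any custom command. A minimal, hypothetical sketch of that pattern, using only Interpreter methods visible in these patches (the `ping` command, class name, and mock-based setup are illustrative, not part of the patch series):

```
package org.semanticweb.rulewerk.client.shell;

import java.io.StringWriter;

import org.mockito.Mockito;
import org.semanticweb.rulewerk.commands.CommandExecutionException;
import org.semanticweb.rulewerk.commands.CommandInterpreter;
import org.semanticweb.rulewerk.commands.Interpreter;
import org.semanticweb.rulewerk.commands.SimpleStyledPrinter;
import org.semanticweb.rulewerk.core.model.api.Command;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.parser.DefaultParserConfiguration;
import org.semanticweb.rulewerk.parser.ParsingException;

// Hypothetical sketch of the command-registration pattern used by ExitCommandInterpreter.
public class PingCommandSketch {

	public static void main(final String[] args) throws ParsingException, CommandExecutionException {
		final StringWriter writer = new StringWriter();
		// Mock-based setup as in the tests above; a real shell would pass a VLog-backed reasoner.
		final Interpreter interpreter = new Interpreter(Mockito.mock(Reasoner.class),
				new SimpleStyledPrinter(writer), new DefaultParserConfiguration());

		// Register a custom command, exactly as Shell does for "exit".
		interpreter.registerCommandInterpreter("ping", new CommandInterpreter() {
			@Override
			public void run(final Command command, final Interpreter i) throws CommandExecutionException {
				i.printNormal("pong\n");
			}

			@Override
			public void printHelp(final String commandName, final Interpreter i) {
				i.printNormal("Usage: @" + commandName + " .\n");
			}

			@Override
			public String getSynopsis() {
				return "replies with pong";
			}
		});

		interpreter.runCommand(interpreter.parseCommand("@ping ."));
		System.out.print(writer.toString()); // prints "pong"
	}
}
```

The shell's own exit handling follows the same route: the parsed `@exit .` command is dispatched to the registered interpreter, which then calls back into `Shell.exitShell()`.
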
+ // TODO establish test scope + } + +// static public Interpreter getMockTerminalInterpreter(final Terminal terminal) { +// final StyledPrinter printer = new TerminalStyledPrinter(terminal); +// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); +// final Reasoner reasoner = Mockito.mock(Reasoner.class); +// return new Interpreter(reasoner, printer, parserConfiguration); //} + + static public Interpreter getMockInterpreter(final Writer writer) { + final SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Reasoner reasoner = Mockito.mock(Reasoner.class); + return new Interpreter(reasoner, printer, parserConfiguration); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + final Shell shellMock = Mockito.mock(Shell.class); + final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); + final String synopsis = commandInterpreter.getSynopsis(); + assertTrue(synopsis.length() < 70); + } + +} From c857ab0c10f28e09a163df0ea981d1dbbc72eca8 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Aug 2020 00:26:52 +0200 Subject: [PATCH 0770/1003] unit test read exit command --- .../rulewerk/client/shell/ShellTest.java | 64 +++++++++++++------ 1 file changed, 44 insertions(+), 20 deletions(-) diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index bb3e8a02e..4fae876ff 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -1,14 +1,22 @@ package org.semanticweb.rulewerk.client.shell; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import java.io.PrintWriter; + import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; import org.junit.Test; import org.mockito.Mockito; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; - +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; public class ShellTest { @@ -79,24 +87,40 @@ public void readCommand_Invalid() { // TODO test Parsing exception has been thrown } -// -// static public Interpreter getMockInterpreter(final Terminal terminal) { -// final Reasoner reasonerMock = Mockito.mock(Reasoner.class); -// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); -// -// final Interpreter interpreter = new Interpreter(reasonerMock, new TerminalStyledPrinter(terminal), -// parserConfiguration); -// -// final PrintWriter printWriter = Mockito.mock(PrintWriter.class); -// Mockito.when(terminal.writer()).thenReturn(printWriter); -//// -//// // final TerminalStyledPrinter printer = new TerminalStyledPrinter(writer); -//// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); -//// final KnowledgeBase knowledgeBase = new KnowledgeBase(); -// -//// Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); -//// 
return new Interpreter(reasoner, printer, parserConfiguration); -// return interpreter; -// } + @Test + public void readCommand_Exit() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + // TODO need real interpreter here + final Shell shell = new Shell(getMockInterpreter()); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + + // TODO test Parsing exception has not been thrown + // TODO test ExitCommandInterpreter.run() has been called + + assertFalse(shell.running); + } + + static public Interpreter getMockInterpreter() { + final Terminal terminal = Mockito.mock(Terminal.class); + final Reasoner reasoner = Mockito.mock(Reasoner.class); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + + final Interpreter interpreter = new Interpreter(reasoner, new TerminalStyledPrinter(terminal), + parserConfiguration); + + final PrintWriter printWriter = Mockito.mock(PrintWriter.class); + Mockito.when(terminal.writer()).thenReturn(printWriter); + + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + + return interpreter; + } } From e68cf46b2156232656f5dbbfaa409619ce5dd678 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 09:57:09 +0200 Subject: [PATCH 0771/1003] more meaningful exceptions --- .../java/org/semanticweb/rulewerk/parser/RuleParser.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index c9a00c103..7043d92c4 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -181,8 +181,8 @@ static T parseSyntaxFragment(final String input, SyntaxFragme result = parserAction.parse(localParser); localParser.ensureEndOfInput(); } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { - LOGGER.error("Exception while parsing " + syntaxFragmentType + ": {}!", input); - throw new ParsingException("Exception while parsing " + syntaxFragmentType, e); + LOGGER.error("Error parsing " + syntaxFragmentType + ": {}!", input); + throw new ParsingException("Error parsing " + syntaxFragmentType + ": " + e.getMessage(), e); } return result; } @@ -257,8 +257,8 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException try { parser.parse(); } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing Knowledge Base!", e); - throw new ParsingException("Exception while parsing Knowledge Base.", e); + LOGGER.error("Error parsing Knowledge Base: " + e.getMessage(), e); + throw new ParsingException(e.getMessage(), e); } KnowledgeBase knowledgeBase = parser.getKnowledgeBase(); From 4db3b7df1a13f69595e9a808cea38c49e8d7293c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 11:01:37 +0200 Subject: [PATCH 0772/1003] allow interpreter to make reasoner; new clear command --- .../client/shell/InteractiveShell.java | 19 +- .../rulewerk/client/shell/ShellTest.java | 271 ++++++++++-------- .../commands/ExitCommandInterpreterTest.java | 4 +- 
.../commands/ClearCommandInterpreter.java | 57 ++++ .../rulewerk/commands/Interpreter.java | 67 ++++- .../commands/ClearCommandInterpreterTest.java | 130 +++++++++ .../rulewerk/commands/InterpreterTest.java | 82 +++--- 7 files changed, 447 insertions(+), 183 deletions(-) create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 88ca015bf..0eaa6dde1 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -25,8 +25,6 @@ import org.jline.reader.LineReader; import org.jline.terminal.Terminal; import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; @@ -42,20 +40,21 @@ public class InteractiveShell public void run() throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); - final Interpreter interpreter = this.initializeInterpreter(terminal); - final Shell shell = new Shell(interpreter); - final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); - final String prompt = DefaultConfiguration.buildPrompt(terminal); + try (Interpreter interpreter = this.initializeInterpreter(terminal)) { + final Shell shell = new Shell(interpreter); - shell.run(lineReader, prompt); + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); + final String prompt = DefaultConfiguration.buildPrompt(terminal); + + shell.run(lineReader, prompt); + } } Interpreter initializeInterpreter(final Terminal terminal) { - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - final Reasoner reasoner = new VLogReasoner(knowledgeBase); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, new TerminalStyledPrinter(terminal), + final Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, + (knowledgeBase) -> new VLogReasoner(knowledgeBase), new TerminalStyledPrinter(terminal), parserConfiguration); return interpreter; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 4fae876ff..0393a18a0 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -1,126 +1,145 @@ -package org.semanticweb.rulewerk.client.shell; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; - -import java.io.PrintWriter; - -import org.jline.reader.LineReader; -import org.jline.terminal.Terminal; -import org.junit.Test; -import org.mockito.Mockito; -import 
org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; - -public class ShellTest { - - @Test - public void processReadLine_Blank() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" "); - assertEquals("", processedReadLine); - } - - @Test - public void processReadLine_StartsWithAt() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" @ "); - assertEquals("@ .", processedReadLine); - } - - @Test - public void processReadLine_EndsWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" . "); - assertEquals("@.", processedReadLine); - } - - @Test - public void processReadLine_StartsWithAtEndsWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" @. "); - assertEquals("@.", processedReadLine); - } - - @Test - public void processReadLine_DoesNotStartWithAt_DoesNotEndWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" .@ "); - assertEquals("@.@ .", processedReadLine); - } - - @Test - public void readCommand_Blank() { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final String prompt = "myPrompt"; - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(" "); - - final Command command = shell.readCommand(lineReaderMock, prompt); - assertNull(command); - - // TODO test interpreter.parseCommand was not called - // TODO test exceptions have not been thrown - } - - @Test - public void readCommand_Invalid() { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final String prompt = "myPrompt"; - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("invalid"); - - final Command command = shell.readCommand(lineReaderMock, prompt); - assertNull(command); - - // TODO test interpreter.parseCommand was called - // TODO test Parsing exception has been thrown - } - - @Test - public void readCommand_Exit() { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final String prompt = "myPrompt"; - // TODO need real interpreter here - final Shell shell = new Shell(getMockInterpreter()); - - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); - - final Command command = shell.readCommand(lineReaderMock, prompt); - assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); - - // TODO test Parsing exception has not been thrown - // TODO test ExitCommandInterpreter.run() has been called - - assertFalse(shell.running); - } - - static public Interpreter getMockInterpreter() { - final Terminal terminal = Mockito.mock(Terminal.class); - final Reasoner reasoner = Mockito.mock(Reasoner.class); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - - final Interpreter interpreter = new 
Interpreter(reasoner, new TerminalStyledPrinter(terminal), - parserConfiguration); - - final PrintWriter printWriter = Mockito.mock(PrintWriter.class); - Mockito.when(terminal.writer()).thenReturn(printWriter); - - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); - - return interpreter; - } - -} +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; + +import java.io.PrintWriter; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; + +public class ShellTest { + + @Test + public void processReadLine_Blank() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" "); + assertEquals("", processedReadLine); + } + + @Test + public void processReadLine_StartsWithAt() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @ "); + assertEquals("@ .", processedReadLine); + } + + @Test + public void processReadLine_EndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" . "); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_StartsWithAtEndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @. 
"); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_DoesNotStartWithAt_DoesNotEndWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" .@ "); + assertEquals("@.@ .", processedReadLine); + } + + @Test + public void readCommand_Blank() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(" "); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertNull(command); + + // TODO test interpreter.parseCommand was not called + // TODO test exceptions have not been thrown + } + + @Test + public void readCommand_Invalid() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("invalid"); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertNull(command); + + // TODO test interpreter.parseCommand was called + // TODO test Parsing exception has been thrown + } + + @Test + public void readCommand_Exit() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + // TODO need real interpreter here + final Shell shell = new Shell(getMockInterpreter()); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + + // TODO test Parsing exception has not been thrown + // TODO test ExitCommandInterpreter.run() has been called + + assertFalse(shell.running); + } + + static public Interpreter getMockInterpreter() { + final Terminal terminal = Mockito.mock(Terminal.class); + final PrintWriter printWriter = Mockito.mock(PrintWriter.class); + Mockito.when(terminal.writer()).thenReturn(printWriter); + + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + + final Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (kb) -> { + Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(kb); + return reasoner; + }, new TerminalStyledPrinter(terminal), parserConfiguration); + + return interpreter; + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java index b442817b8..f6169e80b 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -83,8 +83,8 @@ public void help_succeeds() throws ParsingException, CommandExecutionException { static public Interpreter getMockInterpreter(final Writer writer) { final SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Reasoner reasoner = Mockito.mock(Reasoner.class); - return new Interpreter(reasoner, printer, parserConfiguration); + return new 
Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (kb) -> Mockito.mock(Reasoner.class), printer, + parserConfiguration); } @Test diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java new file mode 100644 index 000000000..421f1d5b2 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -0,0 +1,57 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; + +public class ClearCommandInterpreter implements CommandInterpreter { + + static final String TASK_ALL = "ALL"; + static final String TASK_INFERENCES = "INF"; + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 1); + String task = Interpreter.extractNameArgument(command, 0, "task"); + if (TASK_ALL.equals(task)) { + interpreter.clearReasonerAndKnowledgeBase(); + interpreter.printNormal("Knowledge has been cleared; reasoner has been completely reset.\n"); + } else if (TASK_INFERENCES.equals(task)) { + interpreter.getReasoner().resetReasoner(); + interpreter.printNormal("Reasoner has been reset.\n"); + } else { + throw new CommandExecutionException( + "Task \"" + task + "\" not supported; should be one of: " + TASK_ALL + ", " + TASK_INFERENCES); + } + } + + @Override + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " TASK\n" + // + " TASK: what to reset, ALL (knowledge base), INF (inferences)\n"); + } + + @Override + public String getSynopsis() { + return "discards the knowledge base and/or previously computed inferences"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index d75e8e235..447a50ca2 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -4,6 +4,7 @@ import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; +import java.io.IOException; import java.io.InputStream; import java.io.OutputStreamWriter; import java.io.Writer; @@ -35,6 +36,7 @@ import java.util.Set; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Terms; @@ -46,17 +48,39 @@ 
import org.semanticweb.rulewerk.parser.javacc.ParseException; import org.semanticweb.rulewerk.parser.javacc.TokenMgrError; -public class Interpreter { +public class Interpreter implements AutoCloseable { - final Reasoner reasoner; + @FunctionalInterface + public interface ReasonerProvider { + public Reasoner reasoner(KnowledgeBase knowledgeBase); + } + + @FunctionalInterface + public interface KnowledgeBaseProvider { + public KnowledgeBase knowledgeBase(); + } + + final public static KnowledgeBaseProvider EMPTY_KNOWLEDGE_BASE_PROVIDER = new KnowledgeBaseProvider() { + @Override + public KnowledgeBase knowledgeBase() { + return new KnowledgeBase(); + } + }; + + final ReasonerProvider reasonerProvider; + final KnowledgeBaseProvider knowledgeBaseProvider; + + Reasoner reasoner = null; final StyledPrinter printer; final ParserConfiguration parserConfiguration; final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); - public Interpreter(final Reasoner reasoner, final StyledPrinter printer, - final ParserConfiguration parserConfiguration) { - this.reasoner = reasoner; + public Interpreter(final KnowledgeBaseProvider knowledgeBaseProvider, final ReasonerProvider reasonerProvider, + final StyledPrinter printer, final ParserConfiguration parserConfiguration) { + this.knowledgeBaseProvider = knowledgeBaseProvider; + this.reasonerProvider = reasonerProvider; + clearReasonerAndKnowledgeBase(); this.printer = printer; this.parserConfiguration = parserConfiguration; this.registerDefaultCommandInterpreters(); @@ -157,6 +181,7 @@ private void registerDefaultCommandInterpreters() { this.registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); this.registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); this.registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); + this.registerCommandInterpreter("clear", new ClearCommandInterpreter()); this.registerCommandInterpreter("reason", new ReasonCommandInterpreter()); this.registerCommandInterpreter("query", new QueryCommandInterpreter()); this.registerCommandInterpreter("export", new ExportCommandInterpreter()); @@ -231,4 +256,36 @@ public InputStream getFileInputStream(String fileName) throws FileNotFoundExcept return new FileInputStream(fileName); } + /** + * Completely resets the reasoner and knowledge base. All inferences and + * statements are cleared. + */ + public void clearReasonerAndKnowledgeBase() { + closeReasoner(); + reasoner = reasonerProvider.reasoner(knowledgeBaseProvider.knowledgeBase()); + try { + reasoner.reason(); + } catch (IOException e) { + throw new RulewerkRuntimeException("Failed to initialise reasoner: " + e.getMessage(), e); + } + } + + /** + * Frees all resources, especially those associated with reasoning. + */ + @Override + public void close() { + closeReasoner(); + } + + /** + * Closes and discards the internal {@link Reasoner}. 
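With the provider-based constructor above, the Interpreter owns its reasoner: `@clear ALL` rebuilds both the knowledge base and the reasoner from the providers, and `close()` releases the reasoner's resources. A minimal usage sketch, assuming the VLog-backed setup used by `InteractiveShell` in this patch (class name and package are illustrative):

```
package org.semanticweb.rulewerk.client.shell;

import java.io.StringWriter;

import org.semanticweb.rulewerk.commands.CommandExecutionException;
import org.semanticweb.rulewerk.commands.Interpreter;
import org.semanticweb.rulewerk.commands.SimpleStyledPrinter;
import org.semanticweb.rulewerk.parser.DefaultParserConfiguration;
import org.semanticweb.rulewerk.parser.ParsingException;
import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;

// Hypothetical sketch of the new Interpreter lifecycle and the @clear command.
public class InterpreterLifecycleSketch {

	public static void main(final String[] args) throws ParsingException, CommandExecutionException {
		final StringWriter writer = new StringWriter();
		// The interpreter builds its reasoner from the providers and rebuilds it on @clear ALL.
		try (Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER,
				(knowledgeBase) -> new VLogReasoner(knowledgeBase), new SimpleStyledPrinter(writer),
				new DefaultParserConfiguration())) {
			// Discard only computed inferences, keeping the knowledge base:
			interpreter.runCommand(interpreter.parseCommand("@clear INF ."));
			// Discard everything and reset the reasoner completely:
			interpreter.runCommand(interpreter.parseCommand("@clear ALL ."));
		} // close() releases the reasoner's resources
		System.out.print(writer.toString());
	}
}
```
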
+ */ + private void closeReasoner() { + if (reasoner != null) { + reasoner.close(); + reasoner = null; + } + } + } diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java new file mode 100644 index 000000000..3baf1df69 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java @@ -0,0 +1,130 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.util.Arrays; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ClearCommandInterpreterTest { + + @Test + public void correctUseAll_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = Mockito.spy(InterpreterTest.getMockInterpreter(writer)); + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + assertEquals(1, interpreter.getKnowledgeBase().getFacts().size()); + + Command command = interpreter.parseCommand("@clear ALL ."); + interpreter.runCommand(command); + + assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + Mockito.verify(interpreter).clearReasonerAndKnowledgeBase(); + } + + @Test + public void correctUseInf_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = Mockito.spy(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + try (Interpreter interpreter = new Interpreter(() -> knowledgeBase, 
(kb) -> reasoner, printer, + parserConfiguration)) { + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + Command command = interpreter.parseCommand("@clear INF ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + Mockito.verify(reasoner).resetReasoner(); + } + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@clear ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@clear \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void unkonwnTask_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@clear UNKNOWNTASK ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new ClearCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new ClearCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java index 74e517e33..443ed8baa 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java @@ -40,10 +40,11 @@ public class InterpreterTest { static public Interpreter getMockInterpreter(Writer writer) { SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - KnowledgeBase knowledgeBase = new KnowledgeBase(); - Reasoner reasoner = Mockito.mock(Reasoner.class); - Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); - return new Interpreter(reasoner, printer, parserConfiguration); + return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (knowledgeBase) -> { + Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + return reasoner; + }, printer, parserConfiguration); } /** @@ -73,45 +74,47 @@ 
public void getters_succeed() { StringWriter writer = new StringWriter(); SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - KnowledgeBase knowledgeBase = new KnowledgeBase(); - Reasoner reasoner = Mockito.mock(Reasoner.class); + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = Mockito.mock(Reasoner.class); Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); - Interpreter interpreter = new Interpreter(reasoner, printer, parserConfiguration); - - assertEquals(knowledgeBase, interpreter.getKnowledgeBase()); - assertEquals(reasoner, interpreter.getReasoner()); - assertEquals(writer, interpreter.getWriter()); - assertEquals(parserConfiguration, interpreter.getParserConfiguration()); + try (Interpreter interpreter = new Interpreter(() -> knowledgeBase, (kb) -> reasoner, printer, + parserConfiguration)) { + assertEquals(knowledgeBase, interpreter.getKnowledgeBase()); + assertEquals(reasoner, interpreter.getReasoner()); + assertEquals(writer, interpreter.getWriter()); + assertEquals(parserConfiguration, interpreter.getParserConfiguration()); + } } @Test(expected = CommandExecutionException.class) public void unknownCommand_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); - Interpreter interpreter = getMockInterpreter(writer); - - Command command = interpreter.parseCommand("@unknown ."); - interpreter.runCommand(command); + try (Interpreter interpreter = getMockInterpreter(writer)) { + Command command = interpreter.parseCommand("@unknown ."); + interpreter.runCommand(command); + } } @Test(expected = ParsingException.class) public void malformedCommand_fails() throws ParsingException { StringWriter writer = new StringWriter(); - Interpreter interpreter = getMockInterpreter(writer); - - interpreter.parseCommand("malformed ."); + try (Interpreter interpreter = getMockInterpreter(writer)) { + interpreter.parseCommand("malformed ."); + } } @Test public void prefixesAreUsed_succeeds() throws ParsingException, PrefixDeclarationException { StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); - Command command = interpreter.parseCommand("@somecommand eg:test ."); + Command command = interpreter.parseCommand("@somecommand eg:test ."); - assertEquals(1, command.getArguments().size()); - assertTrue(command.getArguments().get(0).fromTerm().isPresent()); - assertEquals("http://example.org/test", command.getArguments().get(0).fromTerm().get().getName()); + assertEquals(1, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertEquals("http://example.org/test", command.getArguments().get(0).fromTerm().get().getName()); + } } @Test @@ -119,21 +122,20 @@ public void print_succeeds() { StringWriter writer = new StringWriter(); SimpleStyledPrinter printer = Mockito.spy(new SimpleStyledPrinter(writer)); ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - Reasoner reasoner = Mockito.mock(Reasoner.class); - Interpreter interpreter = new Interpreter(reasoner, printer, parserConfiguration); - - 
interpreter.printCode("Code"); - interpreter.printNormal("Normal"); - interpreter.printEmph("Emph"); - interpreter.printSection("Section"); - interpreter.printImportant("Important"); - - Mockito.verify(printer).printCode("Code"); - Mockito.verify(printer).printNormal("Normal"); - Mockito.verify(printer).printEmph("Emph"); - Mockito.verify(printer).printSection("Section"); - Mockito.verify(printer).printImportant("Important"); - + try (Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, + (kb) -> Mockito.mock(Reasoner.class), printer, parserConfiguration)) { + interpreter.printCode("Code"); + interpreter.printNormal("Normal"); + interpreter.printEmph("Emph"); + interpreter.printSection("Section"); + interpreter.printImportant("Important"); + + Mockito.verify(printer).printCode("Code"); + Mockito.verify(printer).printNormal("Normal"); + Mockito.verify(printer).printEmph("Emph"); + Mockito.verify(printer).printSection("Section"); + Mockito.verify(printer).printImportant("Important"); + } } } From 9520d6049fb1b01c21d9dc733158dad18267dcd0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 11:07:48 +0200 Subject: [PATCH 0773/1003] Better help --- .../rulewerk/commands/ClearCommandInterpreter.java | 2 +- .../semanticweb/rulewerk/commands/LoadCommandInterpreter.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index 421f1d5b2..f36c78dfb 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -33,7 +33,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio String task = Interpreter.extractNameArgument(command, 0, "task"); if (TASK_ALL.equals(task)) { interpreter.clearReasonerAndKnowledgeBase(); - interpreter.printNormal("Knowledge has been cleared; reasoner has been completely reset.\n"); + interpreter.printNormal("Knowledge base has been cleared; reasoner has been completely reset.\n"); } else if (TASK_INFERENCES.equals(task)) { interpreter.getReasoner().resetReasoner(); interpreter.printNormal("Reasoner has been reset.\n"); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index a5dd3196c..62878f8be 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -199,12 +199,12 @@ private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, S @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " [TASK] [RDF predicate]\n" // + interpreter.printNormal("Usage: @" + commandName + " [TASK] \"file\" [RDF predicate]\n" // + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // + " OWL to load an OWL ontology and convert it to facts and rules\n" // + " RDF to load an RDF document and convert it to facts\n" // - + " file: path to the file to load\n" // + + " \"file\": path to the file to load, enclosed in 
quotes\n" // + " RDF predicate: optional name of the predicate used for loading RDF\n" // + " triples (default: TRIPLE); use ABOX to load triples\n" // + " like OWL assertions, using unary and binary predicates\n"); From f1fac9a57b1bd672c85255de08d9d8f111fcb889 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 11:27:18 +0200 Subject: [PATCH 0774/1003] support clear FACTS and clear RULES --- .../commands/ClearCommandInterpreter.java | 14 ++++ .../commands/ClearCommandInterpreterTest.java | 82 +++++++++++++++++-- 2 files changed, 88 insertions(+), 8 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index f36c78dfb..6c4e3c17f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -21,11 +21,15 @@ */ import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Rule; public class ClearCommandInterpreter implements CommandInterpreter { static final String TASK_ALL = "ALL"; static final String TASK_INFERENCES = "INF"; + static final String TASK_FACTS = "FACTS"; + static final String TASK_RULES = "RULES"; @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { @@ -37,6 +41,16 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } else if (TASK_INFERENCES.equals(task)) { interpreter.getReasoner().resetReasoner(); interpreter.printNormal("Reasoner has been reset.\n"); + } else if (TASK_FACTS.equals(task)) { + for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { + interpreter.getKnowledgeBase().removeStatement(fact); + } + interpreter.printNormal("All facts have been removed from the knowledge base.\n"); + } else if (TASK_RULES.equals(task)) { + for (Rule rule : interpreter.getKnowledgeBase().getRules()) { + interpreter.getKnowledgeBase().removeStatement(rule); + } + interpreter.printNormal("All rules have been removed from the knowledge base.\n"); } else { throw new CommandExecutionException( "Task \"" + task + "\" not supported; should be one of: " + TASK_ALL + ", " + TASK_INFERENCES); diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java index 3baf1df69..afbce0729 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java @@ -30,7 +30,9 @@ import org.mockito.Mockito; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; @@ -45,10 +47,17 @@ public class ClearCommandInterpreterTest { public void correctUseAll_succeeds() throws ParsingException, 
CommandExecutionException, IOException { StringWriter writer = new StringWriter(); Interpreter interpreter = Mockito.spy(InterpreterTest.getMockInterpreter(writer)); - Predicate predicate = Expressions.makePredicate("p", 1); - Term term = Expressions.makeAbstractConstant("a"); - Fact fact = Expressions.makeFact(predicate, term); + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(rule); assertEquals(1, interpreter.getKnowledgeBase().getFacts().size()); @@ -71,21 +80,78 @@ public void correctUseInf_succeeds() throws ParsingException, CommandExecutionEx Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); try (Interpreter interpreter = new Interpreter(() -> knowledgeBase, (kb) -> reasoner, printer, parserConfiguration)) { - Predicate predicate = Expressions.makePredicate("p", 1); - Term term = Expressions.makeAbstractConstant("a"); - Fact fact = Expressions.makeFact(predicate, term); + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(rule); Command command = interpreter.parseCommand("@clear INF ."); interpreter.runCommand(command); assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); - assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); Mockito.verify(reasoner).resetReasoner(); } } + @Test + public void correctUseFacts_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); + interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(rule); + + Command command = interpreter.parseCommand("@clear FACTS ."); + interpreter.runCommand(command); + + assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty()); + 
assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + } + + @Test + public void correctUseRules_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); + interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(rule); + + Command command = interpreter.parseCommand("@clear RULES ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + } + @Test(expected = CommandExecutionException.class) public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); @@ -103,7 +169,7 @@ public void wrongArgumentType_fails() throws ParsingException, CommandExecutionE Command command = interpreter.parseCommand("@clear \"string\" ."); interpreter.runCommand(command); } - + @Test(expected = CommandExecutionException.class) public void unkonwnTask_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); From 48739a9e5fd16e65c4e1bad3068dfd7e00525878 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 12:07:24 +0200 Subject: [PATCH 0775/1003] support @clear PREFIXES command --- .../commands/ClearCommandInterpreter.java | 17 ++- .../commands/ClearCommandInterpreterTest.java | 121 +++++++++++------- .../model/api/PrefixDeclarationRegistry.java | 7 +- .../AbstractPrefixDeclarationRegistry.java | 6 + .../MergingPrefixDeclarationRegistryTest.java | 8 ++ 5 files changed, 105 insertions(+), 54 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index 6c4e3c17f..6644e3bed 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -30,6 +30,7 @@ public class ClearCommandInterpreter implements CommandInterpreter { static final String TASK_INFERENCES = "INF"; static final String TASK_FACTS = "FACTS"; static final String TASK_RULES = "RULES"; + static final String TASK_PREFIXES = "PREFIXES"; @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { @@ -51,16 +52,24 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio interpreter.getKnowledgeBase().removeStatement(rule); } interpreter.printNormal("All rules have been removed from the knowledge base.\n"); + } else if (TASK_PREFIXES.equals(task)) { + 
interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().clear(); + interpreter.printNormal("All prefixes and the base namespace have been removed from the knowledge base.\n"); } else { - throw new CommandExecutionException( - "Task \"" + task + "\" not supported; should be one of: " + TASK_ALL + ", " + TASK_INFERENCES); + throw new CommandExecutionException("Task \"" + task + "\" not supported; should be one of: " + TASK_ALL + + ", " + TASK_INFERENCES + ", " + TASK_FACTS + ", " + TASK_RULES + ", " + TASK_PREFIXES); } } @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " TASK\n" + // - " TASK: what to reset, ALL (knowledge base), INF (inferences)\n"); + interpreter.printNormal("Usage: @" + commandName + " TASK\n" // + + " TASK: what to reset, possible values:\n" // + + " ALL: empty knowledge base and completely reset reasoner\n" // + + " INF: reset reasoner to clear all loaded data and inferences\n" // + + " FACTS: remove all facts from knowledge base\n" // + + " RULES: remove all rules from knowledge base\n" // + + " PREFIXES: undeclare all prefixes and base namespace\n"); } @Override diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java index afbce0729..b9f2fe29d 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java @@ -22,16 +22,24 @@ import static org.junit.Assert.*; -import java.io.IOException; import java.io.StringWriter; import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; import org.junit.Test; import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -43,21 +51,39 @@ public class ClearCommandInterpreterTest { + static Term a = Expressions.makeAbstractConstant("a"); + static Term x = Expressions.makeUniversalVariable("X"); + static Predicate p = Expressions.makePredicate("p", 1); + static Predicate q = Expressions.makePredicate("q", 1); + static Predicate r = Expressions.makePredicate("r", 1); + static Fact fact = Expressions.makeFact(p, a); + static PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + static PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + static Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); + static Map standardPrefixes = new HashMap<>(); + static { + standardPrefixes.put("eg:", "http://example.org/"); + } + + private void prepareKnowledgeBase(KnowledgeBase knowledgeBase) throws PrefixDeclarationException { + knowledgeBase.addStatement(fact); + knowledgeBase.addStatement(rule); +
knowledgeBase.getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + } + + private void assertPrefixesEqual(Map expectedPrefixes, + PrefixDeclarationRegistry prefixDeclarationRegistry) { + Set> prefixes = StreamSupport.stream(prefixDeclarationRegistry.spliterator(), false) + .collect(Collectors.toSet()); + assertEquals(expectedPrefixes.entrySet(), prefixes); + } + @Test - public void correctUseAll_succeeds() throws ParsingException, CommandExecutionException, IOException { + public void correctUseAll_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { StringWriter writer = new StringWriter(); Interpreter interpreter = Mockito.spy(InterpreterTest.getMockInterpreter(writer)); - Term a = Expressions.makeAbstractConstant("a"); - Term x = Expressions.makeUniversalVariable("X"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Predicate r = Expressions.makePredicate("r", 1); - Fact fact = Expressions.makeFact(p, a); - PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); - PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); - Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(rule); + prepareKnowledgeBase(interpreter.getKnowledgeBase()); assertEquals(1, interpreter.getKnowledgeBase().getFacts().size()); @@ -67,11 +93,13 @@ public void correctUseAll_succeeds() throws ParsingException, CommandExecutionEx assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertPrefixesEqual(Collections.emptyMap(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); Mockito.verify(interpreter).clearReasonerAndKnowledgeBase(); } @Test - public void correctUseInf_succeeds() throws ParsingException, CommandExecutionException, IOException { + public void correctUseInf_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { StringWriter writer = new StringWriter(); SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); @@ -80,17 +108,7 @@ public void correctUseInf_succeeds() throws ParsingException, CommandExecutionEx Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); try (Interpreter interpreter = new Interpreter(() -> knowledgeBase, (kb) -> reasoner, printer, parserConfiguration)) { - Term a = Expressions.makeAbstractConstant("a"); - Term x = Expressions.makeUniversalVariable("X"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Predicate r = Expressions.makePredicate("r", 1); - Fact fact = Expressions.makeFact(p, a); - PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); - PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); - Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(rule); + prepareKnowledgeBase(interpreter.getKnowledgeBase()); Command command = interpreter.parseCommand("@clear INF ."); interpreter.runCommand(command); @@ -98,25 +116,17 @@ public void correctUseInf_succeeds() throws ParsingException, CommandExecutionEx 
assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); Mockito.verify(reasoner).resetReasoner(); } } @Test - public void correctUseFacts_succeeds() throws ParsingException, CommandExecutionException, IOException { + public void correctUseFacts_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { StringWriter writer = new StringWriter(); try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { - Term a = Expressions.makeAbstractConstant("a"); - Term x = Expressions.makeUniversalVariable("X"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Predicate r = Expressions.makePredicate("r", 1); - Fact fact = Expressions.makeFact(p, a); - PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); - PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); - Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(rule); + prepareKnowledgeBase(interpreter.getKnowledgeBase()); Command command = interpreter.parseCommand("@clear FACTS ."); interpreter.runCommand(command); @@ -124,24 +134,16 @@ public void correctUseFacts_succeeds() throws ParsingException, CommandExecution assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty()); assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); } } @Test - public void correctUseRules_succeeds() throws ParsingException, CommandExecutionException, IOException { + public void correctUseRules_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { StringWriter writer = new StringWriter(); try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { - Term a = Expressions.makeAbstractConstant("a"); - Term x = Expressions.makeUniversalVariable("X"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Predicate r = Expressions.makePredicate("r", 1); - Fact fact = Expressions.makeFact(p, a); - PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); - PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); - Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(rule); + prepareKnowledgeBase(interpreter.getKnowledgeBase()); Command command = interpreter.parseCommand("@clear RULES ."); interpreter.runCommand(command); @@ -149,6 +151,27 @@ public void correctUseRules_succeeds() throws ParsingException, CommandExecution assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); + } + } + + @Test + public void correctUsePrefixes_succeeds() + throws 
ParsingException, CommandExecutionException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + + interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(rule); + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + + Command command = interpreter.parseCommand("@clear PREFIXES ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertPrefixesEqual(Collections.emptyMap(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 37d62280e..1532c706c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -44,13 +44,18 @@ public interface PrefixDeclarationRegistry extends Iterable(); + } + @Override public String getBaseIri() { if (baseIri == null) { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index cc46e3035..157fbfded 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -105,6 +105,14 @@ public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationExce assertEquals(2, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); } + @Test + public void clearPrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("another:", MORE_SPECIFIC); + prefixDeclarations.clear(); + assertEquals(0, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); + } + @Test public void setPrefixIri_setSamePrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.setPrefixIri("eg:", BASE); From 7d9858e07ea6f05a3d133e5f5f910f23c6e553b1 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 12:16:40 +0200 Subject: [PATCH 0776/1003] support clear DATASOURCES --- .../commands/ClearCommandInterpreter.java | 16 ++++++-- .../commands/ClearCommandInterpreterTest.java | 37 ++++++++++++++++--- 2 files changed, 44 insertions(+), 9 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index 6644e3bed..b27bda588 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -21,6 +21,7 @@ */ import org.semanticweb.rulewerk.core.model.api.Command; +import 
org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Rule; @@ -30,6 +31,7 @@ public class ClearCommandInterpreter implements CommandInterpreter { static final String TASK_INFERENCES = "INF"; static final String TASK_FACTS = "FACTS"; static final String TASK_RULES = "RULES"; + static final String TASK_SOURCES = "DATASOURCES"; static final String TASK_PREFIXES = "PREFIXES"; @Override @@ -52,12 +54,19 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio interpreter.getKnowledgeBase().removeStatement(rule); } interpreter.printNormal("All rules have been removed from the knowledge base.\n"); + } else if (TASK_SOURCES.equals(task)) { + for (DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() + .getDataSourceDeclarations()) { + interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); + } + interpreter.printNormal("All datasource declarations have been removed from the knowledge base.\n"); } else if (TASK_PREFIXES.equals(task)) { interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().clear(); interpreter.printNormal("All prefixes and the base namespace have been removed from the knowledge base.\n"); } else { - throw new CommandExecutionException("Task \"" + task + "\" not supported; should be one of: " + TASK_ALL - + ", " + TASK_INFERENCES + ", " + TASK_FACTS + ", " + TASK_RULES + ", " + TASK_PREFIXES); + throw new CommandExecutionException( + "Task \"" + task + "\" not supported; should be one of: " + TASK_ALL + ", " + TASK_INFERENCES + + ", " + TASK_FACTS + ", " + TASK_RULES + ", " + TASK_SOURCES + ", " + TASK_PREFIXES); } } @@ -69,12 +78,13 @@ public void printHelp(String commandName, Interpreter interpreter) { + " INF: reset reasoner to clear all loaded data and inferences\n" // + " FACTS: remove all facts from knowledge base\n" // + " RULES: remove all rules from knowledge base\n" // + + " DATASOURCES: remove all data source declarations from knowledge base\n" // + " PREFIXES: undeclare all prefixes and base namespace\n"); } @Override public String getSynopsis() { - return "discards the knowledge base and/or previously computed inferences"; + return "discards (parts of) the knowledge base or computed inferences"; } } diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java index b9f2fe29d..207cc68e1 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java @@ -36,12 +36,15 @@ import org.mockito.Mockito; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import 
org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -64,10 +67,13 @@ public class ClearCommandInterpreterTest { static { standardPrefixes.put("eg:", "http://example.org/"); } + static DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(p, + Mockito.mock(DataSource.class)); private void prepareKnowledgeBase(KnowledgeBase knowledgeBase) throws PrefixDeclarationException { knowledgeBase.addStatement(fact); knowledgeBase.addStatement(rule); + knowledgeBase.addStatement(dataSourceDeclaration); knowledgeBase.getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); } @@ -115,7 +121,8 @@ public void correctUseInf_succeeds() assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); - assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration), + interpreter.getKnowledgeBase().getDataSourceDeclarations()); assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); Mockito.verify(reasoner).resetReasoner(); } @@ -133,7 +140,8 @@ public void correctUseFacts_succeeds() assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty()); assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); - assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration), + interpreter.getKnowledgeBase().getDataSourceDeclarations()); assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); } } @@ -150,6 +158,24 @@ public void correctUseRules_succeeds() assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration), + interpreter.getKnowledgeBase().getDataSourceDeclarations()); + assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); + } + } + + @Test + public void correctUseSources_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + prepareKnowledgeBase(interpreter.getKnowledgeBase()); + + Command command = interpreter.parseCommand("@clear DATASOURCES ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); } @@ -161,16 +187,15 @@ public void correctUsePrefixes_succeeds() StringWriter writer = new StringWriter(); try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(rule); - interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + prepareKnowledgeBase(interpreter.getKnowledgeBase()); Command command = interpreter.parseCommand("@clear PREFIXES ."); 
interpreter.runCommand(command); assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); - assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration), + interpreter.getKnowledgeBase().getDataSourceDeclarations()); assertPrefixesEqual(Collections.emptyMap(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); } } From 03946ac2b1d91cc6a5e9735299af94f7ef51fd29 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Aug 2020 14:29:43 +0200 Subject: [PATCH 0777/1003] test read unparseable command --- .../rulewerk/client/shell/ShellTest.java | 75 +++++++++++-------- .../rulewerk/client/shell/ShellTestUtils.java | 32 ++++++++ .../commands/ExitCommandInterpreterTest.java | 30 ++------ 3 files changed, 80 insertions(+), 57 deletions(-) create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 0393a18a0..5c5b2fd72 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -20,23 +20,21 @@ * #L% */ - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; -import java.io.PrintWriter; +import java.io.StringWriter; import org.jline.reader.LineReader; -import org.jline.terminal.Terminal; import org.junit.Test; import org.mockito.Mockito; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.commands.CommandExecutionException; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; public class ShellTest { @@ -92,54 +90,67 @@ public void readCommand_Blank() { } @Test - public void readCommand_Invalid() { + public void readCommand_Unknown() throws ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); final String prompt = "myPrompt"; - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("invalid"); + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("unknown"); final Command command = shell.readCommand(lineReaderMock, prompt); - assertNull(command); - // TODO test interpreter.parseCommand was called - // TODO test Parsing exception has been thrown + Mockito.verify(interpreterSpy).parseCommand("@unknown ."); + assertEquals("unknown", command.getName()); + assertTrue(command.getArguments().isEmpty()); + + // TODO test Parsing exception has not been thrown } @Test - public void readCommand_Exit() { + public void readCommand_ParsingException() 
throws ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); final String prompt = "myPrompt"; - // TODO need real interpreter here - final Shell shell = new Shell(getMockInterpreter()); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); - final Command command = shell.readCommand(lineReaderMock, prompt); - assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("@"); - // TODO test Parsing exception has not been thrown - // TODO test ExitCommandInterpreter.run() has been called + final Command command = shell.readCommand(lineReaderMock, prompt); - assertFalse(shell.running); + Mockito.verify(interpreterSpy).parseCommand("@ ."); + assertNull(command); + + // TODO test Parsing exception has been thrown + assertTrue(stringWriter.toString().startsWith("Error: ")); } - static public Interpreter getMockInterpreter() { - final Terminal terminal = Mockito.mock(Terminal.class); - final PrintWriter printWriter = Mockito.mock(PrintWriter.class); - Mockito.when(terminal.writer()).thenReturn(printWriter); + @Test + public void readCommand_Exit() throws CommandExecutionException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreterMock = ShellTestUtils.getMockInterpreter(stringWriter); + final Shell shell = new Shell(interpreterMock); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); - final Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (kb) -> { - Reasoner reasoner = Mockito.mock(Reasoner.class); - Mockito.when(reasoner.getKnowledgeBase()).thenReturn(kb); - return reasoner; - }, new TerminalStyledPrinter(terminal), parserConfiguration); + final Command command = shell.readCommand(lineReaderMock, prompt); + assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + assertTrue(command.getArguments().isEmpty()); - return interpreter; + // TODO test Parsing exception has not been thrown + assertFalse(shell.running); } } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java new file mode 100644 index 000000000..8cb2f27e0 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -0,0 +1,32 @@ +package org.semanticweb.rulewerk.client.shell; + +import java.io.PrintWriter; +import java.io.Writer; + +import org.jline.terminal.Terminal; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; + +public final class ShellTestUtils { + + private ShellTestUtils() { + } + + public static Interpreter getMockInterpreter(final Writer writer) { + final Terminal terminalMock = Mockito.mock(Terminal.class); + final TerminalStyledPrinter 
terminalStyledPrinter = new TerminalStyledPrinter(terminalMock); + final PrintWriter printWriter = new PrintWriter(writer); + Mockito.when(terminalMock.writer()).thenReturn(printWriter); + + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (knowledgeBase) -> { + final Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + return reasoner; + }, terminalStyledPrinter, parserConfiguration); + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java index f6169e80b..33da1aa4c 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -25,19 +25,15 @@ import static org.junit.Assert.assertTrue; import java.io.StringWriter; -import java.io.Writer; import org.junit.Test; import org.mockito.Mockito; import org.semanticweb.rulewerk.client.shell.Shell; +import org.semanticweb.rulewerk.client.shell.ShellTestUtils; import org.semanticweb.rulewerk.commands.CommandExecutionException; import org.semanticweb.rulewerk.commands.CommandInterpreter; import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.commands.SimpleStyledPrinter; import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; public class ExitCommandInterpreterTest { @@ -47,7 +43,7 @@ public void exitShell_succeeds() throws CommandExecutionException { final Interpreter interpreterMock = Mockito.mock(Interpreter.class); final Shell shell = new Shell(interpreterMock); final Shell shellSpy = Mockito.spy(shell); - final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellSpy); + final ExitCommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellSpy); commandInterpreter.run(Mockito.mock(Command.class), interpreterMock); @@ -57,9 +53,10 @@ public void exitShell_succeeds() throws CommandExecutionException { @Test public void help_succeeds() throws ParsingException, CommandExecutionException { final Shell shellMock = Mockito.mock(Shell.class); - final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); + final ExitCommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); + final StringWriter writer = new StringWriter(); - final Interpreter interpreter = getMockInterpreter(writer); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); final Interpreter interpreterSpy = Mockito.spy(interpreter); commandInterpreter.printHelp("commandname", interpreterSpy); @@ -68,23 +65,6 @@ public void help_succeeds() throws ParsingException, CommandExecutionException { final String result = writer.toString(); assertEquals("Usage: commandname.\n", result); - - // TODO what about testing printing to terminal? 
- // TODO establish test scope - } - -// static public Interpreter getMockTerminalInterpreter(final Terminal terminal) { -// final StyledPrinter printer = new TerminalStyledPrinter(terminal); -// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); -// final Reasoner reasoner = Mockito.mock(Reasoner.class); -// return new Interpreter(reasoner, printer, parserConfiguration); -//} - - static public Interpreter getMockInterpreter(final Writer writer) { - final SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (kb) -> Mockito.mock(Reasoner.class), printer, - parserConfiguration); } @Test From af62a1d61c6efa6c15e4f49ee3f6d0989a6759a1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Aug 2020 15:08:55 +0200 Subject: [PATCH 0778/1003] test read command interrupt request --- .../commands/ExitCommandInterpreter.java | 2 +- .../rulewerk/client/shell/ShellTest.java | 108 ++++++++++++------ .../rulewerk/client/shell/ShellTestUtils.java | 30 +++++ 3 files changed, 104 insertions(+), 36 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index 85f4edb6d..706275678 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -30,7 +30,7 @@ public class ExitCommandInterpreter implements CommandInterpreter { - public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>()); + public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>(0)); public static enum ExitCommandName { exit; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 5c5b2fd72..32004381b 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -21,16 +21,16 @@ */ import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.StringWriter; +import org.jline.reader.EndOfFileException; import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; import org.junit.Test; import org.mockito.Mockito; -import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.commands.CommandExecutionException; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; @@ -38,71 +38,71 @@ public class ShellTest { + private final String prompt = "myPrompt"; + @Test - public void processReadLine_Blank() { + public void processReadLine_blank() { final Shell shell = new Shell(Mockito.mock(Interpreter.class)); final String processedReadLine = shell.processReadLine(" "); assertEquals("", processedReadLine); } @Test - public void processReadLine_StartsWithAt() { + public void processReadLine_startsWithAt() { final Shell shell 
= new Shell(Mockito.mock(Interpreter.class)); final String processedReadLine = shell.processReadLine(" @ "); assertEquals("@ .", processedReadLine); } @Test - public void processReadLine_EndsWithStop() { + public void processReadLine_endsWithStop() { final Shell shell = new Shell(Mockito.mock(Interpreter.class)); final String processedReadLine = shell.processReadLine(" . "); assertEquals("@.", processedReadLine); } @Test - public void processReadLine_StartsWithAtEndsWithStop() { + public void processReadLine_startsWithAtEndsWithStop() { final Shell shell = new Shell(Mockito.mock(Interpreter.class)); final String processedReadLine = shell.processReadLine(" @. "); assertEquals("@.", processedReadLine); } @Test - public void processReadLine_DoesNotStartWithAt_DoesNotEndWithStop() { + public void processReadLine_doesNotStartWithAt_DoesNotEndWithStop() { final Shell shell = new Shell(Mockito.mock(Interpreter.class)); final String processedReadLine = shell.processReadLine(" .@ "); assertEquals("@.@ .", processedReadLine); } @Test - public void readCommand_Blank() { + public void readCommand_blank() throws ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final String prompt = "myPrompt"; - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(" "); + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn(" "); - final Command command = shell.readCommand(lineReaderMock, prompt); + final Command command = shell.readCommand(lineReaderMock, this.prompt); assertNull(command); - // TODO test interpreter.parseCommand was not called + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); // TODO test exceptions have not been thrown } @Test - public void readCommand_Unknown() throws ParsingException { + public void readCommand_unknown() throws ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final String prompt = "myPrompt"; - final StringWriter stringWriter = new StringWriter(); final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); final Interpreter interpreterSpy = Mockito.spy(interpreter); final Shell shell = new Shell(interpreterSpy); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("unknown"); + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("unknown"); - final Command command = shell.readCommand(lineReaderMock, prompt); + final Command command = shell.readCommand(lineReaderMock, this.prompt); Mockito.verify(interpreterSpy).parseCommand("@unknown ."); assertEquals("unknown", command.getName()); @@ -112,45 +112,83 @@ public void readCommand_Unknown() throws ParsingException { } @Test - public void readCommand_ParsingException() throws ParsingException { + public void readCommand_parsingException() throws ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final String prompt = "myPrompt"; - final StringWriter stringWriter = new StringWriter(); final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); final Interpreter interpreterSpy = Mockito.spy(interpreter); final Shell shell = new Shell(interpreterSpy); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("@"); + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("@"); - final Command command = 
shell.readCommand(lineReaderMock, prompt); + final Command command = shell.readCommand(lineReaderMock, this.prompt); Mockito.verify(interpreterSpy).parseCommand("@ ."); assertNull(command); - + // TODO test Parsing exception has been thrown - assertTrue(stringWriter.toString().startsWith("Error: ")); + assertTrue(stringWriter.toString().startsWith("Error: failed to parse command")); } @Test - public void readCommand_Exit() throws CommandExecutionException { + public void readCommand_exit() throws CommandExecutionException, ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final String prompt = "myPrompt"; - final StringWriter stringWriter = new StringWriter(); - final Interpreter interpreterMock = ShellTestUtils.getMockInterpreter(stringWriter); - final Shell shell = new Shell(interpreterMock); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("exit"); - final Command command = shell.readCommand(lineReaderMock, prompt); - assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); - assertTrue(command.getArguments().isEmpty()); + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + Mockito.verify(interpreterSpy).parseCommand("@exit ."); // TODO test Parsing exception has not been thrown - assertFalse(shell.running); + } + + @Test + public void readCommand_interruptRequest_CTRLC_emptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(new UserInterruptException("")).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void readCommand_interruptRequest_CTRLC_nonEmptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(new UserInterruptException(" ")).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + assertNull(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void readCommand_interruptRequest_CTRLD_emptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(EndOfFileException.class).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); } } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java 
b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java index 8cb2f27e0..189a83607 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -1,11 +1,36 @@ package org.semanticweb.rulewerk.client.shell; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.io.PrintWriter; import java.io.Writer; import org.jline.terminal.Terminal; import org.mockito.Mockito; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; import org.semanticweb.rulewerk.parser.ParserConfiguration; @@ -29,4 +54,9 @@ public static Interpreter getMockInterpreter(final Writer writer) { }, terminalStyledPrinter, parserConfiguration); } + public static void testIsExitCommand(final Command command) { + assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + assertTrue(command.getArguments().isEmpty()); + } + } From 74ea88e11857a8eee41d390cb89a9499dcb52372 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 15:46:15 +0200 Subject: [PATCH 0779/1003] space after prompt --- .../semanticweb/rulewerk/client/shell/DefaultConfiguration.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 76d8b7c3e..c1a026dc4 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -86,7 +86,7 @@ public static String buildPrompt(final Terminal terminal) { public static AttributedString buildPromptProvider() { final AttributedStyle promptStyle = AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW); - return new AttributedString("rulewerk>", promptStyle); + return new AttributedString("rulewerk> ", promptStyle); } } From 07607529b6a04a7774314353c961cc67bc6e05c9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Aug 2020 17:22:04 +0200 Subject: [PATCH 0780/1003] modify gitignore delete test output files --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 8454baf10..b052b97ca 100644 --- a/.gitignore +++ b/.gitignore @@ -53,5 +53,6 @@ rulewerk-core/src/test/data/output/* rulewerk-examples/src/main/data/output/* rulewerk-examples/src/main/data/logs/* 
rulewerk-rdf/src/main/data/output/* +rulewerk-vlog/src/test/data/output/* /build-vlog/vlog/ /TAGS From e0e1710a07e4f4502ca5be25d1863ca5a2689fc8 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Aug 2020 12:17:51 +0200 Subject: [PATCH 0781/1003] unit test run commands --- .../rulewerk/client/shell/Shell.java | 40 +- .../rulewerk/client/shell/ShellTest.java | 520 +++++++++++------- 2 files changed, 350 insertions(+), 210 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 7f56b3f22..e4c82fc63 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -36,7 +36,7 @@ public class Shell { private final Interpreter interpreter; - boolean running; + private boolean running; public Shell(final Interpreter interpreter) { this.interpreter = interpreter; @@ -56,24 +56,28 @@ public void run(final LineReader lineReader, final String prompt) { this.running = true; while (this.running) { - final Command command; - try { - command = this.readCommand(lineReader, prompt); - } catch (final Exception e) { - this.interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); - e.printStackTrace(); - continue; - } + this.runCommand(lineReader, prompt); + } + this.interpreter.printSection("Exiting Rulewerk shell ... bye.\n\n"); + } + + Command runCommand(final LineReader lineReader, final String prompt) { + Command command = null; + try { + command = this.readCommand(lineReader, prompt); + } catch (final Exception e) { + this.interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); + e.printStackTrace(); + } - if (command != null) { - try { - this.interpreter.runCommand(command); - } catch (final CommandExecutionException e) { - this.interpreter.printNormal("Error: " + e.getMessage() + "\n"); - } + if (command != null) { + try { + this.interpreter.runCommand(command); + } catch (final CommandExecutionException e) { + this.interpreter.printNormal("Error: " + e.getMessage() + "\n"); } } - this.interpreter.printSection("Exiting Rulewerk shell ... bye.\n\n"); + return command; } /** @@ -142,4 +146,8 @@ private void printWelcome() { this.interpreter.printNormal("\n"); } + boolean isRunning() { + return this.running; + } + } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 32004381b..d5f1d3e18 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -1,194 +1,326 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import java.io.StringWriter; - -import org.jline.reader.EndOfFileException; -import org.jline.reader.LineReader; -import org.jline.reader.UserInterruptException; -import org.junit.Test; -import org.mockito.Mockito; -import org.semanticweb.rulewerk.commands.CommandExecutionException; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.parser.ParsingException; - -public class ShellTest { - - private final String prompt = "myPrompt"; - - @Test - public void processReadLine_blank() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" "); - assertEquals("", processedReadLine); - } - - @Test - public void processReadLine_startsWithAt() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" @ "); - assertEquals("@ .", processedReadLine); - } - - @Test - public void processReadLine_endsWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" . "); - assertEquals("@.", processedReadLine); - } - - @Test - public void processReadLine_startsWithAtEndsWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" @. "); - assertEquals("@.", processedReadLine); - } - - @Test - public void processReadLine_doesNotStartWithAt_DoesNotEndWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" .@ "); - assertEquals("@.@ .", processedReadLine); - } - - @Test - public void readCommand_blank() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final Interpreter interpreterMock = Mockito.mock(Interpreter.class); - final Shell shell = new Shell(interpreterMock); - - Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn(" "); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - assertNull(command); - - Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); - // TODO test exceptions have not been thrown - } - - @Test - public void readCommand_unknown() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final StringWriter stringWriter = new StringWriter(); - final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); - final Interpreter interpreterSpy = Mockito.spy(interpreter); - final Shell shell = new Shell(interpreterSpy); - - Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("unknown"); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - - Mockito.verify(interpreterSpy).parseCommand("@unknown ."); - assertEquals("unknown", command.getName()); - assertTrue(command.getArguments().isEmpty()); - - // TODO test Parsing exception has not been thrown - } - - @Test - public void readCommand_parsingException() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final StringWriter stringWriter = new StringWriter(); - final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); - final Interpreter interpreterSpy = 
Mockito.spy(interpreter); - final Shell shell = new Shell(interpreterSpy); - - Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("@"); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - - Mockito.verify(interpreterSpy).parseCommand("@ ."); - assertNull(command); - - // TODO test Parsing exception has been thrown - assertTrue(stringWriter.toString().startsWith("Error: failed to parse command")); - } - - @Test - public void readCommand_exit() throws CommandExecutionException, ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final StringWriter stringWriter = new StringWriter(); - final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); - final Interpreter interpreterSpy = Mockito.spy(interpreter); - final Shell shell = new Shell(interpreterSpy); - - Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("exit"); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - ShellTestUtils.testIsExitCommand(command); - Mockito.verify(interpreterSpy).parseCommand("@exit ."); - - // TODO test Parsing exception has not been thrown - } - - @Test - public void readCommand_interruptRequest_CTRLC_emptyPartialLine() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final Interpreter interpreterMock = Mockito.mock(Interpreter.class); - final Shell shell = new Shell(interpreterMock); - - Mockito.doThrow(new UserInterruptException("")).when(lineReaderMock).readLine(this.prompt); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - ShellTestUtils.testIsExitCommand(command); - - Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); - } - - @Test - public void readCommand_interruptRequest_CTRLC_nonEmptyPartialLine() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final Interpreter interpreterMock = Mockito.mock(Interpreter.class); - final Shell shell = new Shell(interpreterMock); - - Mockito.doThrow(new UserInterruptException(" ")).when(lineReaderMock).readLine(this.prompt); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - assertNull(command); - - Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); - } - - @Test - public void readCommand_interruptRequest_CTRLD_emptyPartialLine() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final Interpreter interpreterMock = Mockito.mock(Interpreter.class); - final Shell shell = new Shell(interpreterMock); - - Mockito.doThrow(EndOfFileException.class).when(lineReaderMock).readLine(this.prompt); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - ShellTestUtils.testIsExitCommand(command); - - Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); - } - -} +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.StringWriter; +import java.io.Writer; + +import org.jline.reader.EndOfFileException; +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ShellTest { + + private final String prompt = "myPrompt"; + + @Test + public void processReadLine_blank() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" "); + assertEquals("", processedReadLine); + } + + @Test + public void processReadLine_startsWithAt() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @ "); + assertEquals("@ .", processedReadLine); + } + + @Test + public void processReadLine_endsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" . "); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_startsWithAtEndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @. 
"); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_doesNotStartWithAt_DoesNotEndWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" .@ "); + assertEquals("@.@ .", processedReadLine); + } + + @Test + public void readCommand_blank() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn(" "); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + assertNull(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + // TODO test exceptions have not been thrown + } + + @Test + public void readCommand_unknown() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("unknown"); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + + Mockito.verify(interpreterSpy).parseCommand("@unknown ."); + assertEquals("unknown", command.getName()); + assertTrue(command.getArguments().isEmpty()); + + // TODO test Parsing exception has not been thrown + } + + @Test + public void readCommand_parsingException() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("@"); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + + Mockito.verify(interpreterSpy).parseCommand("@ ."); + assertNull(command); + + // TODO test Parsing exception has been thrown + assertTrue(stringWriter.toString().startsWith("Error: failed to parse command")); + } + + @Test + public void readCommand_exit() throws CommandExecutionException, ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("exit"); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + Mockito.verify(interpreterSpy).parseCommand("@exit ."); + + // TODO test Parsing exception has not been thrown + } + + @Test + public void readCommand_interruptRequest_CTRLC_emptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(new UserInterruptException("")).when(lineReaderMock).readLine(this.prompt); + + final Command 
command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void readCommand_interruptRequest_CTRLC_nonEmptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(new UserInterruptException(" ")).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + assertNull(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void readCommand_interruptRequest_CTRLD_emptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(EndOfFileException.class).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void run_exit() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("exit"); + + shell.run(lineReader, this.prompt); + + assertFalse(shell.isRunning()); + + this.testPrintWelcome(interpreterSpy); + + Mockito.verify(interpreterSpy).runCommand(Mockito.any(Command.class)); + + this.testPrintExit(interpreterSpy); + + final String[] lines = writer.toString().split("\r\n|\r|\n"); + assertEquals(7, lines.length); + } + + @Test + public void run_empty_exit() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("", "exit"); + + shell.run(lineReader, this.prompt); + + assertFalse(shell.isRunning()); + + this.testPrintWelcome(interpreterSpy); + + Mockito.verify(interpreterSpy).runCommand(Mockito.any(Command.class)); + + this.testPrintExit(interpreterSpy); + + final String[] lines = writer.toString().split("\r\n|\r|\n"); + assertEquals(7, lines.length); + } + + @Test + public void run_help_exit() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("help", "exit"); + + shell.run(lineReader, this.prompt); + + assertFalse(shell.isRunning()); + + this.testPrintWelcome(interpreterSpy); + + Mockito.verify(interpreterSpy, 
Mockito.times(2)).runCommand(Mockito.any(Command.class)); + + this.testPrintExit(interpreterSpy); + + final String[] lines = writer.toString().split("\r\n|\r|\n"); + assertTrue(lines.length > 7); + } + + @Test + public void runCommand_unknown() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("unknown", "exit"); + + final Command command = shell.runCommand(lineReader, this.prompt); + assertNotNull(command); + assertEquals("unknown", command.getName()); + + Mockito.verify(interpreterSpy).runCommand(Mockito.any(Command.class)); + + final String printedResult = writer.toString(); + assertTrue(printedResult.startsWith("Error: ")); + } + + @Test + public void runCommand_exceptionDuringReading() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + final RuntimeException runtimeException = new RuntimeException("test"); + final RuntimeException runtimeExceptionSpy = Mockito.spy(runtimeException); + + Mockito.when(lineReader.readLine(this.prompt)).thenThrow(runtimeExceptionSpy); + + final Command command = shell.runCommand(lineReader, this.prompt); + assertNull(command); + + Mockito.verify(interpreterSpy, Mockito.never()).runCommand(Mockito.any(Command.class)); + + final String printedResult = writer.toString(); + assertTrue(printedResult.startsWith("Unexpected error: " + runtimeException.getMessage())); + + Mockito.verify(runtimeExceptionSpy).printStackTrace(); + } + + public void testPrintWelcome(final Interpreter interpreterSpy) { + Mockito.verify(interpreterSpy, Mockito.times(2)).printNormal("\n"); + Mockito.verify(interpreterSpy).printSection("Welcome to the Rulewerk interactive shell.\n"); + Mockito.verify(interpreterSpy).printNormal("For further information, type "); + Mockito.verify(interpreterSpy).printCode("@help."); + Mockito.verify(interpreterSpy).printNormal(" To quit, type "); + Mockito.verify(interpreterSpy).printCode("@exit.\n"); + } + + public void testPrintExit(final Interpreter interpreterSpy) { + Mockito.verify(interpreterSpy).printSection("Exiting Rulewerk shell ... 
bye.\n\n"); + } + +} From 0691d9b014fa8f49759406f634240d35548cd2c4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Aug 2020 13:14:25 +0200 Subject: [PATCH 0782/1003] test terminal styled printer --- .../shell/TerminalStylePrinterTest.java | 63 +++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java new file mode 100644 index 000000000..beec791ab --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java @@ -0,0 +1,63 @@ +package org.semanticweb.rulewerk.client.shell; + +import java.io.PrintWriter; + +import org.jline.terminal.Terminal; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; +import org.junit.Test; +import org.mockito.Mockito; + +public class TerminalStylePrinterTest { + final Terminal terminal; + final PrintWriter writer; + final TerminalStyledPrinter terminalStyledPrinter; + + public static final String TEST_STRING = "test"; + + public TerminalStylePrinterTest() { + this.writer = Mockito.mock(PrintWriter.class); + this.terminal = Mockito.mock(Terminal.class); + Mockito.when(this.terminal.writer()).thenReturn(this.writer); + + this.terminalStyledPrinter = new TerminalStyledPrinter(this.terminal); + + } + + @Test + public void testPrintNormal() { + this.terminalStyledPrinter.printNormal(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT); + } + + @Test + public void testPrintSection() { + this.terminalStyledPrinter.printSection(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.bold()); + } + + @Test + public void testPrintEmph() { + this.terminalStyledPrinter.printEmph(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.bold()); + } + + @Test + public void testPrintCode() { + this.terminalStyledPrinter.printCode(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW).bold()); + } + + @Test + public void testPrintImportant() { + this.terminalStyledPrinter.printImportant(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.foreground(AttributedStyle.RED)); + } + + private void testPrintStyledExpected(final AttributedStyle expectedStyle) { + final AttributedString expectedAttributedString = new AttributedString(TEST_STRING, expectedStyle); + Mockito.verify(this.writer).print(expectedAttributedString.toAnsi(this.terminal)); + Mockito.verify(this.writer).flush(); + } + +} From cbb83136e8dfb3c2c13fc5cc8aa49a12b91d12bd Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Aug 2020 14:27:45 +0200 Subject: [PATCH 0783/1003] interactive shell initialize interpreter unit test --- .../client/shell/DefaultConfiguration.java | 10 +++-- .../shell/DefaultConfigurationTest.java | 42 +++++++++++++++++++ .../client/shell/InteractiveShellTest.java | 31 ++++++++++++++ 3 files changed, 80 insertions(+), 3 deletions(-) create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java 
b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index c1a026dc4..409da872a 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -77,14 +77,18 @@ private static Completer buildCompleter(final Interpreter interpreter) { } public static Terminal buildTerminal() throws IOException { - return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); + return getDefaultTerminalConfiguration().build(); + } + + static TerminalBuilder getDefaultTerminalConfiguration() { + return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true); } public static String buildPrompt(final Terminal terminal) { - return buildPromptProvider().toAnsi(terminal); + return getDefaultPromptStyle().toAnsi(terminal); } - public static AttributedString buildPromptProvider() { + static AttributedString getDefaultPromptStyle() { final AttributedStyle promptStyle = AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW); return new AttributedString("rulewerk> ", promptStyle); } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java new file mode 100644 index 000000000..422456688 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java @@ -0,0 +1,42 @@ +package org.semanticweb.rulewerk.client.shell; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; + +import org.jline.terminal.Terminal; +import org.jline.terminal.TerminalBuilder; +import org.jline.utils.AttributedString; +import org.junit.Test; +import org.mockito.Mockito; + +public class DefaultConfigurationTest { + + @Test + public void buildPromptProvider() { + final AttributedString promptProvider = DefaultConfiguration.getDefaultPromptStyle(); + assertEquals("rulewerk> ", promptProvider.toString()); + } + + @Test + public void buildPrompt() { + final Terminal terminal = Mockito.mock(Terminal.class); + Mockito.when(terminal.getType()).thenReturn(Terminal.TYPE_DUMB); + final String string = DefaultConfiguration.buildPrompt(terminal); + assertTrue(string.length() >= 10); + } + + public void buildTerminal() throws IOException { + final TerminalBuilder terminalBuilderMock = Mockito.mock(TerminalBuilder.class); + Mockito.when(TerminalBuilder.builder()).thenReturn(terminalBuilderMock); + + Mockito.verify(terminalBuilderMock.dumb(true)); + Mockito.verify(terminalBuilderMock.jansi(true)); + Mockito.verify(terminalBuilderMock.jna(false)); + Mockito.verify(terminalBuilderMock.system(true)); + Mockito.verify(terminalBuilderMock.build()); + + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java new file mode 100644 index 000000000..310333f30 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java @@ -0,0 +1,31 @@ +package org.semanticweb.rulewerk.client.shell; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.PrintWriter; + +import org.jline.terminal.Terminal; +import org.junit.Test; 
+import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; + +public class InteractiveShellTest { + + @Test + public void initializeInterpreter() { + final Terminal terminal = Mockito.mock(Terminal.class); + final PrintWriter writer = Mockito.mock(PrintWriter.class); + Mockito.when(terminal.writer()).thenReturn(writer); + + final InteractiveShell interactiveShell = new InteractiveShell(); + final Interpreter interpreter = interactiveShell.initializeInterpreter(terminal); + + assertTrue(interpreter.getParserConfiguration() instanceof DefaultParserConfiguration); + assertTrue(interpreter.getKnowledgeBase().getStatements().isEmpty()); + assertEquals(writer, interpreter.getWriter()); + } + + +} From 90aec041e38bb6d714c50ae2127b266e0d564233 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Aug 2020 14:38:04 +0200 Subject: [PATCH 0784/1003] completer help commands --- .../client/shell/DefaultConfiguration.java | 4 ++++ .../shell/DefaultConfigurationTest.java | 20 +++++++++++++++++++ .../client/shell/InteractiveShellTest.java | 20 +++++++++++++++++++ .../shell/TerminalStylePrinterTest.java | 20 +++++++++++++++++++ 4 files changed, 64 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 409da872a..29a43e4fb 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -32,6 +32,7 @@ import org.jline.reader.Completer; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; +import org.jline.reader.impl.completer.StringsCompleter; import org.jline.terminal.Terminal; import org.jline.terminal.TerminalBuilder; import org.jline.utils.AttributedString; @@ -68,6 +69,9 @@ private static Completer buildCompleter(final Interpreter interpreter) { registeredCommandNames.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { if (serializedCommandName.equals("@load")) { nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); + } else if (serializedCommandName.equals("@help")) { + nodes.add(TreeCompleter.node(serializedCommandName, + TreeCompleter.node(new StringsCompleter(registeredCommandNames)))); } else { nodes.add(TreeCompleter.node(serializedCommandName)); } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java index 422456688..2c19f1f06 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java index 310333f30..2c9bfb90a 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java index beec791ab..0f0ff0253 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.PrintWriter; import org.jline.terminal.Terminal; From f09fa7848783b0d9deb1946953851176573a110a Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Aug 2020 15:39:12 +0200 Subject: [PATCH 0785/1003] clean code lineReader builder --- .../client/shell/DefaultConfiguration.java | 22 ++++++++----------- .../shell/DefaultConfigurationTest.java | 15 ------------- .../client/shell/InteractiveShellTest.java | 1 - 3 files changed, 9 insertions(+), 29 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 29a43e4fb..08b7242e5 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -28,7 +28,6 @@ import org.jline.builtins.Completers; import org.jline.builtins.Completers.FileNameCompleter; import org.jline.builtins.Completers.TreeCompleter; -import org.jline.builtins.Completers.TreeCompleter.Node; import org.jline.reader.Completer; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; @@ -46,18 +45,16 @@ private DefaultConfiguration() { public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) - .appName("Rulewerk Shell").completer(buildCompleter(interpreter)) - // .expander(expander()) - // .history(buildHistory()) - // .highlighter(buildHighlighter()) - ; + .appName("Rulewerk Shell"); + /* + * This allows completion on an empty buffer, rather than inserting a tab + */ + lineReaderBuilder.option(LineReader.Option.INSERT_TAB, false); + lineReaderBuilder.option(LineReader.Option.AUTO_FRESH_LINE, true); - final LineReader lineReader = lineReaderBuilder.build(); + lineReaderBuilder.completer(buildCompleter(interpreter)); - lineReader.unsetOpt(LineReader.Option.INSERT_TAB); // This allows completion on an empty buffer, rather than - // inserting a tab - lineReader.setOpt(LineReader.Option.AUTO_FRESH_LINE); - return lineReader; + return lineReaderBuilder.build(); } private static Completer buildCompleter(final Interpreter interpreter) { @@ -65,7 +62,7 @@ private static Completer buildCompleter(final Interpreter interpreter) { final FileNameCompleter fileNameCompleter = new Completers.FileNameCompleter(); final Set registeredCommandNames = interpreter.getRegisteredCommands(); - final List nodes = new ArrayList<>(); + final List nodes = new ArrayList<>(); registeredCommandNames.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { if (serializedCommandName.equals("@load")) { nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); @@ -77,7 +74,6 @@ private static Completer buildCompleter(final Interpreter interpreter) { } }); return new TreeCompleter(nodes); - } public static Terminal buildTerminal() throws IOException { diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java index 2c19f1f06..81ca2e0e1 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java +++ 
b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java @@ -23,10 +23,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import java.io.IOException; - import org.jline.terminal.Terminal; -import org.jline.terminal.TerminalBuilder; import org.jline.utils.AttributedString; import org.junit.Test; import org.mockito.Mockito; @@ -47,16 +44,4 @@ public void buildPrompt() { assertTrue(string.length() >= 10); } - public void buildTerminal() throws IOException { - final TerminalBuilder terminalBuilderMock = Mockito.mock(TerminalBuilder.class); - Mockito.when(TerminalBuilder.builder()).thenReturn(terminalBuilderMock); - - Mockito.verify(terminalBuilderMock.dumb(true)); - Mockito.verify(terminalBuilderMock.jansi(true)); - Mockito.verify(terminalBuilderMock.jna(false)); - Mockito.verify(terminalBuilderMock.system(true)); - Mockito.verify(terminalBuilderMock.build()); - - } - } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java index 2c9bfb90a..566ead3a1 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java @@ -47,5 +47,4 @@ public void initializeInterpreter() { assertEquals(writer, interpreter.getWriter()); } - } From 76b1359e283006cc32698a47e1e6003b7fee653b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 15:58:26 +0200 Subject: [PATCH 0786/1003] new caching TermFactory --- .../model/implementation/TermFactory.java | 172 ++++++++++++++++++ .../implementation/Skolemization.java | 20 +- .../{ => implementation}/ArgumentTest.java | 2 +- .../ConjunctionImplTest.java | 2 +- .../DataSourceDeclarationTest.java | 2 +- .../model/{ => implementation}/FactTest.java | 2 +- .../MergingPrefixDeclarationRegistryTest.java | 2 +- .../NegativeLiteralImplTest.java | 2 +- .../PositiveLiteralImplTest.java | 2 +- .../PredicateImplTest.java | 2 +- .../{ => implementation}/RuleImplTest.java | 2 +- .../{ => implementation}/SerializerTest.java | 2 +- .../model/implementation/TermFactoryTest.java | 100 ++++++++++ .../{ => implementation}/TermImplTest.java | 2 +- .../implementation/SkolemizationTest.java | 14 +- 15 files changed, 304 insertions(+), 24 deletions(-) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/ArgumentTest.java (98%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/ConjunctionImplTest.java (99%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/DataSourceDeclarationTest.java (98%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/FactTest.java (97%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/MergingPrefixDeclarationRegistryTest.java (99%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/NegativeLiteralImplTest.java (99%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/PositiveLiteralImplTest.java (99%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => 
implementation}/PredicateImplTest.java (97%)
 rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/RuleImplTest.java (99%)
 rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/SerializerTest.java (99%)
 create mode 100644 rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java
 rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/TermImplTest.java (99%)
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java
new file mode 100644
index 000000000..c32c0bb82
--- /dev/null
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java
@@ -0,0 +1,172 @@
+package org.semanticweb.rulewerk.core.model.implementation;
+
+/*-
+ * #%L
+ * Rulewerk Core Components
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.semanticweb.rulewerk.core.model.api.AbstractConstant;
+import org.semanticweb.rulewerk.core.model.api.DatatypeConstant;
+import org.semanticweb.rulewerk.core.model.api.ExistentialVariable;
+import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.UniversalVariable;
+
+/**
+ * Class for creating various kinds of terms. Instances of this class maintain
+ * an internal cache that allows them to re-use the generated objects, which is
+ * useful to save memory since the same term is often needed in multiple places.
+ *
+ * @author Markus Kroetzsch
+ *
+ */
+public class TermFactory {
+
+	/**
+	 * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used
+	 * here for mapping names to cached terms and predicates.
+	 *
+	 * @author Markus Kroetzsch
+	 *
+	 * @param <K>
+	 * @param <V>
+	 */
+	static class SimpleLruMap<K, V> extends LinkedHashMap<K, V> {
+		private static final long serialVersionUID = 7151535464938775359L;
+		private int maxCapacity;
+
+		public SimpleLruMap(int initialCapacity, int maxCapacity) {
+			super(initialCapacity, 0.75f, true);
+			this.maxCapacity = maxCapacity;
+		}
+
+		@Override
+		protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
+			return size() >= this.maxCapacity;
+		}
+	}
+
+	final private SimpleLruMap<String, AbstractConstant> abstractConstants;
+	final private SimpleLruMap<String, ExistentialVariable> existentialVariables;
+	final private SimpleLruMap<String, UniversalVariable> universalVariables;
+	final private SimpleLruMap<String, Predicate> predicates;
+
+	public TermFactory() {
+		this(65536);
+	}
+
+	public TermFactory(int cacheSize) {
+		abstractConstants = new SimpleLruMap<>(256, cacheSize);
+		existentialVariables = new SimpleLruMap<>(64, 1024);
+		universalVariables = new SimpleLruMap<>(64, 1024);
+		predicates = new SimpleLruMap<>(256, 4096);
+	}
+
+	/**
+	 * Creates a {@link UniversalVariable}.
+	 *
+	 * @param name name of the variable
+	 * @return a {@link UniversalVariable} corresponding to the input.
+	 */
+	public UniversalVariable makeUniversalVariable(String name) {
+		if (universalVariables.containsKey(name)) {
+			return universalVariables.get(name);
+		} else {
+			UniversalVariable result = new UniversalVariableImpl(name);
+			universalVariables.put(name, result);
+			return result;
+		}
+	}
+
+	/**
+	 * Creates an {@link ExistentialVariable}.
+	 *
+	 * @param name name of the variable
+	 * @return a {@link ExistentialVariable} corresponding to the input.
+	 */
+	public ExistentialVariable makeExistentialVariable(String name) {
+		if (existentialVariables.containsKey(name)) {
+			return existentialVariables.get(name);
+		} else {
+			ExistentialVariable result = new ExistentialVariableImpl(name);
+			existentialVariables.put(name, result);
+			return result;
+		}
+	}
+
+	/**
+	 * Creates an {@link AbstractConstant}.
+	 *
+	 * @param name name of the constant
+	 * @return an {@link AbstractConstant} corresponding to the input.
+	 */
+	public AbstractConstant makeAbstractConstant(String name) {
+		if (abstractConstants.containsKey(name)) {
+			return abstractConstants.get(name);
+		} else {
+			AbstractConstant result = new AbstractConstantImpl(name);
+			abstractConstants.put(name, result);
+			return result;
+		}
+	}
+
+	/**
+	 * Creates a {@link DatatypeConstant} from the given input.
+	 *
+	 * @param lexicalValue the lexical representation of the data value
+	 * @param datatypeIri  the full absolute IRI of the datatype of this literal
+	 * @return a {@link DatatypeConstant} corresponding to the input.
+	 */
+	public DatatypeConstant makeDatatypeConstant(String lexicalValue, String datatypeIri) {
+		return new DatatypeConstantImpl(lexicalValue, datatypeIri);
+	}
+
+	/**
+	 * Creates a {@link LanguageStringConstant} from the given input.
+	 *
+	 * @param string      the string value of the constant
+	 * @param languageTag the BCP 47 language tag of the constant; should be in
+	 *                    lower case
+	 * @return a {@link LanguageStringConstant} corresponding to the input.
+	 */
+	public LanguageStringConstant makeLanguageStringConstant(String string, String languageTag) {
+		return new LanguageStringConstantImpl(string, languageTag);
+	}
+
+	/**
+	 * Creates a {@link Predicate}.
+	 *
+	 * @param name  non-blank predicate name
+	 * @param arity predicate arity, strictly greater than 0
+	 * @return a {@link Predicate} corresponding to the input.
+
+ */ + public Predicate makePredicate(String name, int arity) { + String key = name + "#" + String.valueOf(arity); + if (predicates.containsKey(key)) { + return predicates.get(key); + } else { + Predicate result = new PredicateImpl(name, arity); + predicates.put(key, result); + return result; + } + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index e51a6b5d7..54080781b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -25,8 +25,8 @@ import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; /** * A class that implements skolemization and collision-free renaming of named @@ -73,13 +73,15 @@ public RenamedNamedNull getRenamedNamedNull(String name) { /** * Creates a skolem constant that is determined by the given original name. * - * @param name the name of the {@link NamedNull} to skolemize (or any other - * string for which to create a unique renaming) + * @param name the name of the {@link NamedNull} to skolemize (or any + * other string for which to create a unique renaming) + * @param termFactory the {@link TermFactory} that is used to create the + * constant * @return a {@link AbstractConstant} with an IRI that is specific to this * instance and {@code name}. */ - public AbstractConstant getSkolemConstant(String name) { - return new AbstractConstantImpl(getSkolemConstantName(name)); + public AbstractConstant getSkolemConstant(String name, TermFactory termFactory) { + return termFactory.makeAbstractConstant(getSkolemConstantName(name)); } /** @@ -87,12 +89,14 @@ public AbstractConstant getSkolemConstant(String name) { * The method ensures that a new unique name is generated unless the given * object is already a {@link RenamedNamedNull}. * - * @param namedNull the {@link NamedNull} to skolemize + * @param namedNull the {@link NamedNull} to skolemize + * @param termFactory the {@link TermFactory} that is used to create the + * constant * @return a {@link AbstractConstant} with an IRI that is specific to this * instance and {@code namedNull}. 
*/ - public AbstractConstant getSkolemConstant(NamedNull namedNull) { - return new AbstractConstantImpl(getSkolemConstantName(namedNull)); + public AbstractConstant getSkolemConstant(NamedNull namedNull, TermFactory termFactory) { + return termFactory.makeAbstractConstant(getSkolemConstantName(namedNull)); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ArgumentTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ArgumentTest.java index 76efe55d4..66ae2f550 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ArgumentTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImplTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImplTest.java index 322bbda3f..9acc89f28 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationTest.java index ef5c89e30..9df9cd3e0 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/FactTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/FactTest.java index 7539d60c9..1aa8017de 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/FactTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java index 157fbfded..6ea303c2c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImplTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImplTest.java index 34a7e9fc9..475f1ab8b 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImplTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImplTest.java index d4a93e489..c0613996c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImplTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImplTest.java index d8274db91..f77d4f24f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/RuleImplTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/RuleImplTest.java index 5fc0ee6e6..6d234958c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/RuleImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/SerializerTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/SerializerTest.java index f495de508..977160aeb 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/SerializerTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java new file mode 100644 index 000000000..388289c6d --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java @@ -0,0 +1,100 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +import static org.junit.Assert.*; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.ExistentialVariableImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; + +public class TermFactoryTest { + + @Test + public void universalVariable_reused() { + TermFactory termFactory = new TermFactory(); + Term term1 = termFactory.makeUniversalVariable("X"); + Term term2 = termFactory.makeUniversalVariable("Y"); + Term term3 = termFactory.makeUniversalVariable("X"); + Term term4 = new UniversalVariableImpl("X"); + + assertNotEquals(term1, term2); + assertTrue(term1 == term3); + assertEquals(term1, term4); + } + + @Test + public void existentialVariable_reused() { + TermFactory termFactory = new TermFactory(); + Term term1 = termFactory.makeExistentialVariable("X"); + Term term2 = termFactory.makeExistentialVariable("Y"); + Term term3 = termFactory.makeExistentialVariable("X"); + Term term4 = new ExistentialVariableImpl("X"); + + assertNotEquals(term1, term2); + assertTrue(term1 == term3); + assertEquals(term1, term4); + } + + @Test + public void abstractConstant_reused() { + TermFactory termFactory = new TermFactory(); 
+ Term term1 = termFactory.makeAbstractConstant("X"); + Term term2 = termFactory.makeAbstractConstant("Y"); + Term term3 = termFactory.makeAbstractConstant("X"); + Term term4 = new AbstractConstantImpl("X"); + + assertNotEquals(term1, term2); + assertTrue(term1 == term3); + assertEquals(term1, term4); + } + + @Test + public void predicate_reused() { + TermFactory termFactory = new TermFactory(); + Predicate pred1 = termFactory.makePredicate("p", 1); + Predicate pred2 = termFactory.makePredicate("q", 1); + Predicate pred3 = termFactory.makePredicate("p", 2); + Predicate pred4 = termFactory.makePredicate("p", 1); + + assertNotEquals(pred1, pred2); + assertNotEquals(pred1, pred3); + assertTrue(pred1 == pred4); + } + + @Test + public void datatypeConstant_succeeds() { + TermFactory termFactory = new TermFactory(); + Term term1 = termFactory.makeDatatypeConstant("abc", "http://test"); + Term term2 = new DatatypeConstantImpl("abc", "http://test"); + + assertEquals(term1, term2); + } + + @Test + public void languageConstant_succeeds() { + TermFactory termFactory = new TermFactory(); + Term term1 = termFactory.makeLanguageStringConstant("abc", "de"); + Term term2 = new LanguageStringConstantImpl("abc", "de"); + + assertEquals(term1, term2); + } + + @Test + public void lruCache_works() { + TermFactory.SimpleLruMap map = new TermFactory.SimpleLruMap<>(1, 3); + map.put("a", "test"); + map.put("b", "test"); + map.put("c", "test"); + map.put("c", "test2"); + + assertTrue(map.containsKey("b")); + assertTrue(map.containsKey("c")); + assertFalse(map.containsKey("a")); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java index 2420dc479..c8230303d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java index a382aa220..d87cd495f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java @@ -27,6 +27,7 @@ import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; public class SkolemizationTest { private Skolemization skolemization; @@ -76,23 +77,26 @@ public void skolemizeNamedNull_differentInstancesDifferentNames_mapsToDifferentN @Test public void skolemConstant_succeeds() { - AbstractConstant skolem = skolemization.getSkolemConstant(name1); + TermFactory termFactory = new TermFactory(); + AbstractConstant skolem = skolemization.getSkolemConstant(name1, termFactory); 
assertTrue(skolem.getName().startsWith(Skolemization.SKOLEM_IRI_PREFIX)); } @Test public void skolemConstantFromNamedNull_succeeds() { + TermFactory termFactory = new TermFactory(); NamedNull null1 = new NamedNullImpl(name1); - AbstractConstant skolem1 = skolemization.getSkolemConstant(null1); - AbstractConstant skolem2 = skolemization.getSkolemConstant(name1); + AbstractConstant skolem1 = skolemization.getSkolemConstant(null1, termFactory); + AbstractConstant skolem2 = skolemization.getSkolemConstant(name1, termFactory); assertEquals(skolem2, skolem1); } @Test public void skolemConstantFromRenamedNamedNull_succeeds() { + TermFactory termFactory = new TermFactory(); NamedNull null1 = skolemization.getRenamedNamedNull(name1); - AbstractConstant skolem1 = skolemization.getSkolemConstant(null1); - AbstractConstant skolem2 = skolemization.getSkolemConstant(name1); + AbstractConstant skolem1 = skolemization.getSkolemConstant(null1, termFactory); + AbstractConstant skolem2 = skolemization.getSkolemConstant(name1, termFactory); assertEquals(skolem2, skolem1); } } From c1b229807b2a7966db3df6202f1b0849fd86ecdf Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 15:59:33 +0200 Subject: [PATCH 0787/1003] use TermFactory --- .../rulewerk/rdf/RdfValueToTermConverter.java | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java index de70adf90..c152e19c5 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -31,10 +31,7 @@ import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; -import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; -import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; /** * Helper class to convert RDF ters to Rulewerk {@link Term} objects. @@ -46,6 +43,7 @@ final class RdfValueToTermConverter { final boolean skolemize; final Skolemization skolemization = new Skolemization(); + final TermFactory termFactory = new TermFactory(); /** * Constructor. @@ -75,7 +73,7 @@ public Term convertBlankNode(final BNode bNode) { // redundant. But we want a RenamedNamedNull here, and a consistent name format // is nice too. 
if (skolemize) { - return skolemization.getSkolemConstant(bNode.getID()); + return skolemization.getSkolemConstant(bNode.getID(), termFactory); } else { return skolemization.getRenamedNamedNull(bNode.getID()); } @@ -83,24 +81,24 @@ public Term convertBlankNode(final BNode bNode) { public Term convertUri(final URI uri) { final String escapedURIString = NTriplesUtil.escapeString(uri.toString()); - return new AbstractConstantImpl(escapedURIString); + return termFactory.makeAbstractConstant(escapedURIString); } public Term convertLiteral(final Literal literal) { final URI datatype = literal.getDatatype(); if (datatype != null) { - return new DatatypeConstantImpl(XMLDatatypeUtil.normalize(literal.getLabel(), datatype), + return termFactory.makeDatatypeConstant(XMLDatatypeUtil.normalize(literal.getLabel(), datatype), datatype.toString()); } else if (literal.getLanguage() != null) { - return new LanguageStringConstantImpl(literal.getLabel(), literal.getLanguage()); + return termFactory.makeLanguageStringConstant(literal.getLabel(), literal.getLanguage()); } else { - return new DatatypeConstantImpl(literal.getLabel(), PrefixDeclarationRegistry.XSD_STRING); + return termFactory.makeDatatypeConstant(literal.getLabel(), PrefixDeclarationRegistry.XSD_STRING); } } public Predicate convertUriToPredicate(final URI uri, int arity) { final String escapedURIString = NTriplesUtil.escapeString(uri.toString()); - return new PredicateImpl(escapedURIString, arity); + return termFactory.makePredicate(escapedURIString, arity); } } From 99e12dedbae5b078b8d791d3c72add72a5d77482 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 16:14:52 +0200 Subject: [PATCH 0788/1003] use TermFactory in parser --- .../rulewerk/parser/ParserConfiguration.java | 8 +++-- .../rulewerk/parser/javacc/JavaCCParser.jj | 12 +++---- .../parser/javacc/JavaCCParserBase.java | 34 +++++++++++++++---- 3 files changed, 38 insertions(+), 16 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index cec1ad19e..0bf66066d 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -32,7 +32,7 @@ import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; @@ -129,12 +129,14 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(PositiveLit * * @param lexicalForm the (unescaped) lexical form of the constant. * @param datatype the datatype, or null if not present. + * @param termFactory the {@link TermFactory} to use for creating the result * * @throws ParsingException when the lexical form is invalid for the given data * type. * @return the {@link Constant} corresponding to the given arguments. 
*/ - public Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { + public Constant parseDatatypeConstant(final String lexicalForm, final String datatype, + final TermFactory termFactory) throws ParsingException { final String type = ((datatype != null) ? datatype : PrefixDeclarationRegistry.XSD_STRING); final DatatypeConstantHandler handler = this.datatypes.get(type); @@ -142,7 +144,7 @@ public Constant parseDatatypeConstant(final String lexicalForm, final String dat return handler.createConstant(lexicalForm); } - return Expressions.makeDatatypeConstant(lexicalForm, type); + return termFactory.makeDatatypeConstant(lexicalForm, type); } /** diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index 3dbff82a7..1563b9a6c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -257,7 +257,7 @@ PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclaration String predicateName; } { predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { - return Expressions.makePositiveLiteral(predicateName, terms); + return Expressions.makePositiveLiteral(createPredicate(predicateName,terms.size()), terms); } } @@ -268,7 +268,7 @@ Fact fact(FormulaContext context) throws PrefixDeclarationException : { } { predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > < DOT > { try { - return Expressions.makeFact(predicateName, terms); + return Expressions.makeFact(createPredicate(predicateName,terms.size()), terms); } catch (IllegalArgumentException e) { throw makeParseExceptionWithCause("Error parsing fact: " + e.getMessage(), e); } @@ -280,7 +280,7 @@ NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclaration String predicateName; } { < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { - return Expressions.makeNegativeLiteral(predicateName, terms); + return Expressions.makeNegativeLiteral(createPredicate(predicateName,terms.size()), terms); } } @@ -325,7 +325,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { headUniVars.add(s); else if (context == FormulaContext.BODY) bodyVars.add(s); - return Expressions.makeUniversalVariable(s); + return createUniversalVariable(s); } | t = < EXIVAR > { s = t.image.substring(1); @@ -333,7 +333,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { headExiVars.add(s); if (context == FormulaContext.BODY) throw new ParseException("Existentialy quantified variables can not appear in the body. Line: " + t.beginLine + ", Column: "+ t.beginColumn); - return Expressions.makeExistentialVariable(s); + return createExistentialVariable(s); } | try { tt = ConfigurableLiteral () { return tt; } @@ -357,7 +357,7 @@ Constant RDFLiteral() throws PrefixDeclarationException : { } { lex = String() ( lang = < LANGTAG > | < DATATYPE > dt = absoluteIri() )? 
{ if (lang != null) { - return Expressions.makeLanguageStringConstant(lex, lang.image); + return createLanguageStringConstant(lex, lang.image); } return createConstant(lex, dt); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index f367bb382..458a849fd 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -28,14 +28,17 @@ import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Statement; import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; @@ -64,6 +67,7 @@ public class JavaCCParserBase { private KnowledgeBase knowledgeBase; private ParserConfiguration parserConfiguration; private Skolemization skolemization = new Skolemization(); + private TermFactory termFactory = new TermFactory(); /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -143,7 +147,7 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { } catch (PrefixDeclarationException e) { throw makeParseExceptionWithCause("Failed to parse IRI", e); } - return Expressions.makeAbstractConstant(absoluteIri); + return termFactory.makeAbstractConstant(absoluteIri); } /** @@ -155,16 +159,32 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { */ Constant createConstant(String lexicalForm, String datatype) throws ParseException { try { - return parserConfiguration.parseDatatypeConstant(lexicalForm, datatype); + return parserConfiguration.parseDatatypeConstant(lexicalForm, datatype, termFactory); } catch (ParsingException e) { throw makeParseExceptionWithCause("Failed to parse Constant", e); } } - NamedNull createNamedNull(String lexicalForm) throws ParseException { + NamedNull createNamedNull(String lexicalForm) { return this.skolemization.getRenamedNamedNull(lexicalForm); } + UniversalVariable createUniversalVariable(String name) { + return termFactory.makeUniversalVariable(name); + } + + ExistentialVariable createExistentialVariable(String name) { + return termFactory.makeExistentialVariable(name); + } + + LanguageStringConstant createLanguageStringConstant(String string, String languageTag) { + return termFactory.makeLanguageStringConstant(string, languageTag); + } + + Predicate createPredicate(String name, int arity) { + return termFactory.makePredicate(name, arity); + } + void addStatement(Statement statement) { knowledgeBase.addStatement(statement); } @@ -178,7 +198,7 @@ void addDataSource(String predicateName, int arity, DataSource dataSource) throw } } - Predicate predicate = Expressions.makePredicate(predicateName, arity); + Predicate predicate = termFactory.makePredicate(predicateName, arity); addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } @@ -345,8 +365,8 @@ Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syn return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); } - KnowledgeBase parseDirectiveStatement(String name, List arguments, - SubParserFactory subParserFactory) throws ParseException { + KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) + throws ParseException { try { return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); } catch (ParsingException e) { From 274bb07d3cc1850ec152b124bfecd9c13ae1e2cb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 16:15:09 +0200 Subject: [PATCH 0789/1003] license header --- .../model/implementation/TermFactoryTest.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java index 388289c6d..7719f9efa 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.core.model.implementation; +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.*; import org.junit.Test; From 33fbdfe8e6108957c89b99aae4acfd54785f6743 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 17:30:18 +0200 Subject: [PATCH 0790/1003] use BufferedWriter for 100 times speedup --- .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 447a50ca2..b9e7ef7e3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -1,5 +1,6 @@ package org.semanticweb.rulewerk.commands; +import java.io.BufferedWriter; import java.io.ByteArrayInputStream; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -242,7 +243,7 @@ public static PositiveLiteral extractPositiveLiteralArgument(final Command comma * @throws FileNotFoundException */ public Writer getFileWriter(String fileName) throws FileNotFoundException { - return new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8); + return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)); } /** From 41e9f333777578856a7f06cabfecdf8834fd97a9 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 18:03:34 +0200 Subject: [PATCH 0791/1003] support % in strings --- .../semanticweb/rulewerk/parser/javacc/JavaCCParser.jj | 2 +- .../rulewerk/parser/RuleParserParseFactTest.java | 4 ++-- .../org/semanticweb/rulewerk/parser/RuleParserTest.java | 8 ++++++++ 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index 1563b9a6c..cc7568888 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -487,7 +487,7 @@ String PrefixedName() throws PrefixDeclarationException : { } // Comments -< * > SKIP : { +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > SKIP : { < COMMENT : "%" ( ~[ "\n" ] )* "\n" > } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java index d39446d31..378382700 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java @@ -33,7 +33,7 @@ public class RuleParserParseFactTest implements ParserTestUtils { private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarationRegistry.XSD_STRING); - private final Constant b = Expressions.makeDatatypeConstant("b", PrefixDeclarationRegistry.XSD_STRING); + 
private final Constant b = Expressions.makeDatatypeConstant("b%c", PrefixDeclarationRegistry.XSD_STRING); private final Fact factA = Expressions.makeFact("p", a); private final Fact factAB = Expressions.makeFact("p", a, b); @@ -45,7 +45,7 @@ public void parseFact_string_succeeds() throws ParsingException { @Test public void parseFact_twoStrings_succeeds() throws ParsingException { - assertEquals(factAB, RuleParser.parseFact("p(\"a\",\"b\") .")); + assertEquals(factAB, RuleParser.parseFact("p(\"a\",\"b%c\") .")); } @Test(expected = ParsingException.class) diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index 42041cc6d..a7b3be68a 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -139,6 +139,14 @@ public void testSimpleRule() throws ParsingException { assertEquals(Arrays.asList(rule1), statements); } + @Test + public void testFactWithCommentSymbol() throws ParsingException { + String input = "t(\"%test\") . "; + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(Expressions.makeFact("t", + Expressions.makeDatatypeConstant("%test", PrefixDeclarationRegistry.XSD_STRING))), statements); + } + @Test public void testNegationRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; From 676c4092717b636ee311a9d468298a56a4646bc1 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 21:27:55 +0200 Subject: [PATCH 0792/1003] correct error reporting --- .../rulewerk/reasoner/vlog/VLogReasoner.java | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index f49bfef7a..7547ae05c 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -145,7 +145,7 @@ public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { public RuleRewriteStrategy getRuleRewriteStrategy() { return this.ruleRewriteStrategy; } - + @Override public Correctness getCorrectness() { return this.correctness; @@ -370,11 +370,7 @@ private void runChase() { } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final MaterializationException e) { - // FIXME: the message generated here is not guaranteed to be the correct - // interpretation of the exception that is caught - throw new RulewerkRuntimeException( - "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", - e); + throw new RulewerkRuntimeException("VLog encounterd an error during materialization: " + e.getMessage(), e); } if (this.reasoningCompleted) { @@ -504,7 +500,7 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { - final Correctness correctness = this.getCorrectnessUnknownPredicate(query); + final 
Correctness correctness = this.getCorrectnessUnknownPredicate(query); this.logWarningOnCorrectness(correctness); return correctness; } @@ -515,7 +511,8 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St private void validateBeforeQuerying(final PositiveLiteral query) { this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not allowed before Reasoner#reason() was first called!"); + throw new ReasonerStateException(this.reasonerState, + "Querying is not allowed before Reasoner#reason() was first called!"); } Validate.notNull(query, "Query atom must not be null!"); } From cb491623884a5e7f8f908af4e4bac6f4af215b24 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 30 Aug 2020 19:00:56 +0200 Subject: [PATCH 0793/1003] test InteractiveShell run, mock terminal configuration --- .../rulewerk/client/picocli/Main.java | 7 ++-- ...on.java => DefaultShellConfiguration.java} | 40 +++++++++++-------- ...Shell.java => InteractiveShellClient.java} | 12 +++--- .../client/shell/ShellConfiguration.java | 17 ++++++++ ...ava => DefaultShellConfigurationTest.java} | 6 +-- ...t.java => InteractiveShellClientTest.java} | 35 +++++++++++++++- .../rulewerk/client/shell/ShellTestUtils.java | 11 +++++ 7 files changed, 97 insertions(+), 31 deletions(-) rename rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/{DefaultConfiguration.java => DefaultShellConfiguration.java} (71%) rename rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/{InteractiveShell.java => InteractiveShellClient.java} (81%) create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java rename rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/{DefaultConfigurationTest.java => DefaultShellConfigurationTest.java} (82%) rename rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/{InteractiveShellTest.java => InteractiveShellClientTest.java} (51%) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index 409bd3a5b..0772d76e7 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -26,7 +26,8 @@ import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; -import org.semanticweb.rulewerk.client.shell.InteractiveShell; +import org.semanticweb.rulewerk.client.shell.DefaultShellConfiguration; +import org.semanticweb.rulewerk.client.shell.InteractiveShellClient; import picocli.CommandLine; import picocli.CommandLine.Command; @@ -38,7 +39,7 @@ * @author Irina Dragoste * */ -@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShell.class, +@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShellClient.class, RulewerkClientMaterialize.class }) public class Main { @@ -46,7 +47,7 @@ public static void main(final String[] args) throws IOException { configureLogging(); if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { - new InteractiveShell().run(); + new InteractiveShellClient().run(new DefaultShellConfiguration()); } else { if (args[0].equals("materialize")) { final CommandLine commandline = new CommandLine(new 
RulewerkClientMaterialize()); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java similarity index 71% rename from rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java index 08b7242e5..cda67f041 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -38,26 +38,32 @@ import org.jline.utils.AttributedStyle; import org.semanticweb.rulewerk.commands.Interpreter; -public final class DefaultConfiguration { +public class DefaultShellConfiguration implements ShellConfiguration { - private DefaultConfiguration() { + public static final String PROMPT_STRING = "rulewerk> "; + + @Override + public LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { + final LineReaderBuilder lineReaderBuilder = this.getDefaultLineReaderConfiguration(terminal); + + lineReaderBuilder.terminal(terminal); + lineReaderBuilder.completer(this.buildCompleter(interpreter)); + + return lineReaderBuilder.build(); } - public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { - final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) + LineReaderBuilder getDefaultLineReaderConfiguration(final Terminal terminal) { + final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder() .appName("Rulewerk Shell"); /* * This allows completion on an empty buffer, rather than inserting a tab */ lineReaderBuilder.option(LineReader.Option.INSERT_TAB, false); lineReaderBuilder.option(LineReader.Option.AUTO_FRESH_LINE, true); - - lineReaderBuilder.completer(buildCompleter(interpreter)); - - return lineReaderBuilder.build(); + return lineReaderBuilder; } - private static Completer buildCompleter(final Interpreter interpreter) { + Completer buildCompleter(final Interpreter interpreter) { // @load and @export commands require a file name as argument final FileNameCompleter fileNameCompleter = new Completers.FileNameCompleter(); @@ -76,21 +82,23 @@ private static Completer buildCompleter(final Interpreter interpreter) { return new TreeCompleter(nodes); } - public static Terminal buildTerminal() throws IOException { - return getDefaultTerminalConfiguration().build(); + @Override + public Terminal buildTerminal() throws IOException { + return this.getDefaultTerminalConfiguration().build(); } - static TerminalBuilder getDefaultTerminalConfiguration() { + TerminalBuilder getDefaultTerminalConfiguration() { return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true); } - public static String buildPrompt(final Terminal terminal) { - return getDefaultPromptStyle().toAnsi(terminal); + @Override + public String buildPrompt(final Terminal terminal) { + return this.getDefaultPromptStyle().toAnsi(terminal); } - static AttributedString getDefaultPromptStyle() { + AttributedString getDefaultPromptStyle() { final AttributedStyle promptStyle = AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW); - return new AttributedString("rulewerk> ", promptStyle); + return new AttributedString(PROMPT_STRING, promptStyle); } } diff --git 
a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java similarity index 81% rename from rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java index 0eaa6dde1..4c2a9f59c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java @@ -32,20 +32,18 @@ import picocli.CommandLine.Command; @Command(name = "shell", description = "An interactive shell for Rulewerk. The default command.") -public class InteractiveShell -//implements Runnable +public class InteractiveShellClient { -// @Override - public void run() throws IOException { + public void run(final ShellConfiguration configuration) throws IOException { - final Terminal terminal = DefaultConfiguration.buildTerminal(); + final Terminal terminal = configuration.buildTerminal(); try (Interpreter interpreter = this.initializeInterpreter(terminal)) { final Shell shell = new Shell(interpreter); - final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); - final String prompt = DefaultConfiguration.buildPrompt(terminal); + final LineReader lineReader = configuration.buildLineReader(terminal, interpreter); + final String prompt = configuration.buildPrompt(terminal); shell.run(lineReader, prompt); } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java new file mode 100644 index 000000000..bb331ae77 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java @@ -0,0 +1,17 @@ +package org.semanticweb.rulewerk.client.shell; + +import java.io.IOException; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.commands.Interpreter; + +public interface ShellConfiguration { + + LineReader buildLineReader(Terminal terminal, Interpreter interpreter); + + Terminal buildTerminal() throws IOException; + + String buildPrompt(Terminal terminal); + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java similarity index 82% rename from rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java rename to rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java index 81ca2e0e1..e7cab36e7 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java @@ -28,11 +28,11 @@ import org.junit.Test; import org.mockito.Mockito; -public class DefaultConfigurationTest { +public class DefaultShellConfigurationTest { @Test public void buildPromptProvider() { - final AttributedString promptProvider = DefaultConfiguration.getDefaultPromptStyle(); + final AttributedString promptProvider = new DefaultShellConfiguration().getDefaultPromptStyle(); assertEquals("rulewerk> ", 
promptProvider.toString()); } @@ -40,7 +40,7 @@ public void buildPromptProvider() { public void buildPrompt() { final Terminal terminal = Mockito.mock(Terminal.class); Mockito.when(terminal.getType()).thenReturn(Terminal.TYPE_DUMB); - final String string = DefaultConfiguration.buildPrompt(terminal); + final String string = new DefaultShellConfiguration().buildPrompt(terminal); assertTrue(string.length() >= 10); } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java similarity index 51% rename from rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java rename to rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java index 566ead3a1..dddd998ed 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java @@ -23,15 +23,19 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import java.io.IOException; import java.io.PrintWriter; +import java.io.StringWriter; +import org.jline.reader.LineReader; import org.jline.terminal.Terminal; +import org.jline.terminal.impl.DumbTerminal; import org.junit.Test; import org.mockito.Mockito; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -public class InteractiveShellTest { +public class InteractiveShellClientTest { @Test public void initializeInterpreter() { @@ -39,7 +43,7 @@ public void initializeInterpreter() { final PrintWriter writer = Mockito.mock(PrintWriter.class); Mockito.when(terminal.writer()).thenReturn(writer); - final InteractiveShell interactiveShell = new InteractiveShell(); + final InteractiveShellClient interactiveShell = new InteractiveShellClient(); final Interpreter interpreter = interactiveShell.initializeInterpreter(terminal); assertTrue(interpreter.getParserConfiguration() instanceof DefaultParserConfiguration); @@ -47,4 +51,31 @@ public void initializeInterpreter() { assertEquals(writer, interpreter.getWriter()); } + @Test + public void run_mockConfiguration() throws IOException { + final ShellConfiguration configuration = Mockito.mock(ShellConfiguration.class); + final Terminal terminal = Mockito.mock(DumbTerminal.class); + final StringWriter output = new StringWriter(); + final PrintWriter printWriter = new PrintWriter(output); + Mockito.when(terminal.writer()).thenReturn(printWriter); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine("prompt")).thenReturn("help", "exit"); + + Mockito.when(configuration.buildTerminal()).thenReturn(terminal); + Mockito.when(configuration.buildPrompt(terminal)).thenReturn("prompt"); + Mockito.when(configuration.buildLineReader(Mockito.eq(terminal), Mockito.any(Interpreter.class))) + .thenReturn(lineReader); + + final InteractiveShellClient shellClient = new InteractiveShellClient(); + shellClient.run(configuration); + + assertTrue(output.toString().contains("Welcome to the Rulewerk interactive shell.")); + + assertTrue(output.toString().contains("Available commands:")); + + assertTrue(output.toString().contains("Exiting Rulewerk")); + } + + } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java 
b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java index 189a83607..fb04f355a 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -54,6 +54,17 @@ public static Interpreter getMockInterpreter(final Writer writer) { }, terminalStyledPrinter, parserConfiguration); } + public static Interpreter getMockInterpreter(final Writer writer, final Terminal terminal) { + final TerminalStyledPrinter terminalStyledPrinter = new TerminalStyledPrinter(terminal); + + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (knowledgeBase) -> { + final Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + return reasoner; + }, terminalStyledPrinter, parserConfiguration); + } + public static void testIsExitCommand(final Command command) { assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); assertTrue(command.getArguments().isEmpty()); From 178c92fb9cca62947189a613a4bee1115c50700c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 30 Aug 2020 19:08:17 +0200 Subject: [PATCH 0794/1003] license header --- .../client/shell/ShellConfiguration.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java index bb331ae77..5a0d7adab 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.IOException; import org.jline.reader.LineReader; From e7638b4b92d8efc9f030df450ed9325011c29a31 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 31 Aug 2020 11:07:01 +0200 Subject: [PATCH 0795/1003] mock exception thrown in unit test --- .../semanticweb/rulewerk/client/shell/ShellTest.java | 11 ++++++----- .../rulewerk/client/shell/ShellTestUtils.java | 2 +- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index d5f1d3e18..8c3e81448 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -294,10 +294,11 @@ public void runCommand_exceptionDuringReading() throws CommandExecutionException final Shell shell = new Shell(interpreterSpy); final LineReader lineReader = Mockito.mock(LineReader.class); - final RuntimeException runtimeException = new RuntimeException("test"); - final RuntimeException runtimeExceptionSpy = Mockito.spy(runtimeException); + final RuntimeException exception = Mockito.mock(RuntimeException.class); + Mockito.when(exception.getMessage()) + .thenReturn("This exception is thrown intentionally as part of a unit test"); - Mockito.when(lineReader.readLine(this.prompt)).thenThrow(runtimeExceptionSpy); + Mockito.when(lineReader.readLine(this.prompt)).thenThrow(exception); final Command command = shell.runCommand(lineReader, this.prompt); assertNull(command); @@ -305,9 +306,9 @@ public void runCommand_exceptionDuringReading() throws CommandExecutionException Mockito.verify(interpreterSpy, Mockito.never()).runCommand(Mockito.any(Command.class)); final String printedResult = writer.toString(); - assertTrue(printedResult.startsWith("Unexpected error: " + runtimeException.getMessage())); + assertTrue(printedResult.startsWith("Unexpected error: " + exception.getMessage())); - Mockito.verify(runtimeExceptionSpy).printStackTrace(); + Mockito.verify(exception).printStackTrace(); } public void testPrintWelcome(final Interpreter interpreterSpy) { diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java index fb04f355a..9d45cb02b 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -54,7 +54,7 @@ public static Interpreter getMockInterpreter(final Writer writer) { }, terminalStyledPrinter, parserConfiguration); } - public static Interpreter getMockInterpreter(final Writer writer, final Terminal terminal) { + public static Interpreter getMockInterpreter(final Terminal terminal) { final TerminalStyledPrinter terminalStyledPrinter = new TerminalStyledPrinter(terminal); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); From 32cff14e83ca3465ba68f0c1fcdb36dddb45eacc Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 31 Aug 2020 13:48:35 +0200 Subject: [PATCH 0796/1003] improved exception error reporting --- .../rulewerk/parser/directives/ImportFileDirectiveHandler.java | 2 +- .../semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index 2580a2b56..403238df4 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -54,7 +54,7 @@ public KnowledgeBase handleDirective(List arguments, final SubParserFa RuleParser.parseInto(kb, stream, parserConfiguration); }); } catch (RulewerkException | IOException | IllegalArgumentException e) { - throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); + throw new ParsingException("Could not import rules file \"" + file.getName() + "\": " + e.getMessage(), e); } return knowledgeBase; diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 458a849fd..981632edf 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -370,7 +370,7 @@ KnowledgeBase parseDirectiveStatement(String name, List arguments, Sub try { return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); } catch (ParsingException e) { - throw makeParseExceptionWithCause("Failed while trying to parse directive statement", e); + throw makeParseExceptionWithCause(e.getMessage(), e); } } From 6dab3cb170ea1a99f98a5f88cdd5c3e9de71d683 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 1 Sep 2020 10:14:00 +0200 Subject: [PATCH 0797/1003] improved spelling --- .../rulewerk/commands/LoadCommandInterpreter.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 62878f8be..952f3060c 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -96,6 +96,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); + int countDataSourceDeclarationsBefore = interpreter.getKnowledgeBase().getDataSourceDeclarations().size(); if (TASK_RLS.equals(task)) { loadKb(interpreter, fileName); @@ -108,9 +109,11 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio "Unknown task " + task + ". 
Should be one of " + TASK_RLS + ", " + TASK_OWL + ", " + TASK_RDF); } - interpreter.printNormal( - "Loaded " + (interpreter.getKnowledgeBase().getFacts().size() - countFactsBefore) + " new fact(s) and " - + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + " new rule(s).\n"); + interpreter.printNormal("Loaded " + (interpreter.getKnowledgeBase().getFacts().size() - countFactsBefore) + + " new fact(s), " + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + + " new rule(s), and " + (interpreter.getKnowledgeBase().getDataSourceDeclarations().size() + - countDataSourceDeclarationsBefore) + + " new datasource declaration(s).\n"); } From 914a30021ae8023837ef2d606ba6b38dd962d62b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 1 Sep 2020 20:01:45 +0200 Subject: [PATCH 0798/1003] test completers --- .../shell/DefaultShellConfiguration.java | 29 +++-- .../client/shell/InteractiveShellClient.java | 2 +- .../rulewerk/client/shell/Shell.java | 13 ++- .../client/shell/ShellConfiguration.java | 4 +- .../shell/DefaultShellConfigurationTest.java | 102 ++++++++++++++++++ .../shell/InteractiveShellClientTest.java | 3 +- .../rulewerk/client/shell/ShellTestUtils.java | 8 +- .../rulewerk/commands/Interpreter.java | 22 ++-- 8 files changed, 141 insertions(+), 42 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java index cda67f041..d108ea6c0 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -22,13 +22,12 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.List; -import java.util.Set; import org.jline.builtins.Completers; import org.jline.builtins.Completers.FileNameCompleter; import org.jline.builtins.Completers.TreeCompleter; -import org.jline.reader.Completer; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; import org.jline.reader.impl.completer.StringsCompleter; @@ -36,25 +35,24 @@ import org.jline.terminal.TerminalBuilder; import org.jline.utils.AttributedString; import org.jline.utils.AttributedStyle; -import org.semanticweb.rulewerk.commands.Interpreter; public class DefaultShellConfiguration implements ShellConfiguration { public static final String PROMPT_STRING = "rulewerk> "; @Override - public LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { - final LineReaderBuilder lineReaderBuilder = this.getDefaultLineReaderConfiguration(terminal); + public LineReader buildLineReader(final Terminal terminal, final Collection registeredCommands) { + final LineReaderBuilder lineReaderBuilder = this.getDefaultLineReaderConfiguration(LineReaderBuilder.builder()); lineReaderBuilder.terminal(terminal); - lineReaderBuilder.completer(this.buildCompleter(interpreter)); + lineReaderBuilder.completer(this.buildCompleter(registeredCommands)); return lineReaderBuilder.build(); } - LineReaderBuilder getDefaultLineReaderConfiguration(final Terminal terminal) { - final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder() - .appName("Rulewerk Shell"); + LineReaderBuilder getDefaultLineReaderConfiguration(final LineReaderBuilder lineReaderBuilder) { + + lineReaderBuilder.appName("Rulewerk Shell"); /* * This allows 
completion on an empty buffer, rather than inserting a tab */ @@ -63,18 +61,17 @@ LineReaderBuilder getDefaultLineReaderConfiguration(final Terminal terminal) { return lineReaderBuilder; } - Completer buildCompleter(final Interpreter interpreter) { + TreeCompleter buildCompleter(final Collection registeredCommands) { // @load and @export commands require a file name as argument final FileNameCompleter fileNameCompleter = new Completers.FileNameCompleter(); - final Set registeredCommandNames = interpreter.getRegisteredCommands(); final List nodes = new ArrayList<>(); - registeredCommandNames.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { + registeredCommands.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { if (serializedCommandName.equals("@load")) { nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); } else if (serializedCommandName.equals("@help")) { nodes.add(TreeCompleter.node(serializedCommandName, - TreeCompleter.node(new StringsCompleter(registeredCommandNames)))); + TreeCompleter.node(new StringsCompleter(registeredCommands)))); } else { nodes.add(TreeCompleter.node(serializedCommandName)); } @@ -84,11 +81,11 @@ Completer buildCompleter(final Interpreter interpreter) { @Override public Terminal buildTerminal() throws IOException { - return this.getDefaultTerminalConfiguration().build(); + return this.getDefaultTerminalConfiguration(TerminalBuilder.builder()).build(); } - TerminalBuilder getDefaultTerminalConfiguration() { - return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true); + TerminalBuilder getDefaultTerminalConfiguration(final TerminalBuilder terminalBuilder) { + return terminalBuilder.dumb(true).jansi(true).jna(false).system(true); } @Override diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java index 4c2a9f59c..cd62e53a5 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java @@ -42,7 +42,7 @@ public void run(final ShellConfiguration configuration) throws IOException { try (Interpreter interpreter = this.initializeInterpreter(terminal)) { final Shell shell = new Shell(interpreter); - final LineReader lineReader = configuration.buildLineReader(terminal, interpreter); + final LineReader lineReader = configuration.buildLineReader(terminal, shell.getRegisteredCommands()); final String prompt = configuration.buildPrompt(terminal); shell.run(lineReader, prompt); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index e4c82fc63..665a0a1df 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -1,9 +1,5 @@ package org.semanticweb.rulewerk.client.shell; -import org.jline.reader.EndOfFileException; -import org.jline.reader.LineReader; -import org.jline.reader.UserInterruptException; - /*- * #%L * Rulewerk Client @@ -24,6 +20,11 @@ * #L% */ +import java.util.Set; + +import org.jline.reader.EndOfFileException; +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; import 
org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; import org.semanticweb.rulewerk.commands.CommandExecutionException; @@ -150,4 +151,8 @@ boolean isRunning() { return this.running; } + public Set getRegisteredCommands() { + return this.interpreter.getRegisteredCommands(); + } + } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java index 5a0d7adab..9c83ae976 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java @@ -21,14 +21,14 @@ */ import java.io.IOException; +import java.util.Collection; import org.jline.reader.LineReader; import org.jline.terminal.Terminal; -import org.semanticweb.rulewerk.commands.Interpreter; public interface ShellConfiguration { - LineReader buildLineReader(Terminal terminal, Interpreter interpreter); + LineReader buildLineReader(Terminal terminal, Collection commands); Terminal buildTerminal() throws IOException; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java index e7cab36e7..87f369cd9 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java @@ -21,15 +21,37 @@ */ import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.jline.builtins.Completers.TreeCompleter; +import org.jline.reader.Candidate; +import org.jline.reader.LineReader; +import org.jline.reader.ParsedLine; import org.jline.terminal.Terminal; import org.jline.utils.AttributedString; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import org.mockito.Mockito; public class DefaultShellConfigurationTest { + @Rule + public TemporaryFolder folder = new TemporaryFolder(new File(".")); + + public static final List SHELL_COMMANDS = Arrays.asList("help", "load", "assert", "retract", "addsource", + "delsource", "setprefix", "clear", + "reason", "query", "export", "showkb", "exit"); + @Test public void buildPromptProvider() { final AttributedString promptProvider = new DefaultShellConfiguration().getDefaultPromptStyle(); @@ -44,4 +66,84 @@ public void buildPrompt() { assertTrue(string.length() >= 10); } + @Test + public void buildCompleterEmptyLine() { + final ArrayList readWords = new ArrayList(); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + final Set expectedCandidates = SHELL_COMMANDS.stream().map(c -> "@" + c).collect(Collectors.toSet()); + assertEquals(expectedCandidates, candidates); + } + + @Test + public void buildCompleterHelp() { + final ArrayList readWords = new ArrayList(); + readWords.add("@help"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + final Set expectedCandidates = new 
HashSet(SHELL_COMMANDS); + assertEquals(expectedCandidates, candidates); + } + + @Test + public void buildCompleterLoad() { + final ArrayList readWords = new ArrayList(); + readWords.add("@load"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + assertFalse(candidates.isEmpty()); + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + } + + private Set getCompleterCandidates(final ArrayList readWords, final String wordToComplete) { + final List candidates = new ArrayList<>(); + + final TreeCompleter completer = new DefaultShellConfiguration().buildCompleter(SHELL_COMMANDS); + final LineReader reader = Mockito.mock(LineReader.class); + + final ParsedLine parsedLine = this.makeParsedLine(readWords, wordToComplete); + completer.complete(reader, parsedLine, candidates); + return candidates.stream().map(c -> c.value()).collect(Collectors.toSet()); + } + + + private ParsedLine makeParsedLine(final List readWords, final String wordToComplete) { + final ParsedLine parsedLine = new ParsedLine() { + + @Override + public List words() { + return readWords; + } + + @Override + public int wordIndex() { + return readWords.size(); + } + + @Override + public int wordCursor() { + return this.word().length(); + } + + @Override + public String word() { + return wordToComplete; + } + + @Override + public String line() { + // Only used by PipedlineCompleter + return null; + } + + @Override + public int cursor() { + return this.line().length(); + } + }; + return parsedLine; + } + } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java index dddd998ed..e37722070 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java @@ -31,6 +31,7 @@ import org.jline.terminal.Terminal; import org.jline.terminal.impl.DumbTerminal; import org.junit.Test; +import org.mockito.ArgumentMatchers; import org.mockito.Mockito; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; @@ -64,7 +65,7 @@ public void run_mockConfiguration() throws IOException { Mockito.when(configuration.buildTerminal()).thenReturn(terminal); Mockito.when(configuration.buildPrompt(terminal)).thenReturn("prompt"); - Mockito.when(configuration.buildLineReader(Mockito.eq(terminal), Mockito.any(Interpreter.class))) + Mockito.when(configuration.buildLineReader(Mockito.eq(terminal), ArgumentMatchers.anyCollection())) .thenReturn(lineReader); final InteractiveShellClient shellClient = new InteractiveShellClient(); diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java index 9d45cb02b..49d1a7250 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -42,16 +42,10 @@ private ShellTestUtils() { public static Interpreter getMockInterpreter(final Writer writer) { final Terminal terminalMock = Mockito.mock(Terminal.class); - final TerminalStyledPrinter terminalStyledPrinter = new TerminalStyledPrinter(terminalMock); final 
PrintWriter printWriter = new PrintWriter(writer); Mockito.when(terminalMock.writer()).thenReturn(printWriter); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (knowledgeBase) -> { - final Reasoner reasoner = Mockito.mock(Reasoner.class); - Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); - return reasoner; - }, terminalStyledPrinter, parserConfiguration); + return getMockInterpreter(terminalMock); } public static Interpreter getMockInterpreter(final Terminal terminal) { diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index b9e7ef7e3..daecfcddd 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -81,7 +81,7 @@ public Interpreter(final KnowledgeBaseProvider knowledgeBaseProvider, final Reas final StyledPrinter printer, final ParserConfiguration parserConfiguration) { this.knowledgeBaseProvider = knowledgeBaseProvider; this.reasonerProvider = reasonerProvider; - clearReasonerAndKnowledgeBase(); + this.clearReasonerAndKnowledgeBase(); this.printer = printer; this.parserConfiguration = parserConfiguration; this.registerDefaultCommandInterpreters(); @@ -242,7 +242,7 @@ public static PositiveLiteral extractPositiveLiteralArgument(final Command comma * @return * @throws FileNotFoundException */ - public Writer getFileWriter(String fileName) throws FileNotFoundException { + public Writer getFileWriter(final String fileName) throws FileNotFoundException { return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)); } @@ -253,7 +253,7 @@ public Writer getFileWriter(String fileName) throws FileNotFoundException { * @return * @throws FileNotFoundException */ - public InputStream getFileInputStream(String fileName) throws FileNotFoundException { + public InputStream getFileInputStream(final String fileName) throws FileNotFoundException { return new FileInputStream(fileName); } @@ -262,11 +262,11 @@ public InputStream getFileInputStream(String fileName) throws FileNotFoundExcept * statements are cleared. */ public void clearReasonerAndKnowledgeBase() { - closeReasoner(); - reasoner = reasonerProvider.reasoner(knowledgeBaseProvider.knowledgeBase()); + this.closeReasoner(); + this.reasoner = this.reasonerProvider.reasoner(this.knowledgeBaseProvider.knowledgeBase()); try { - reasoner.reason(); - } catch (IOException e) { + this.reasoner.reason(); + } catch (final IOException e) { throw new RulewerkRuntimeException("Failed to initialise reasoner: " + e.getMessage(), e); } } @@ -276,16 +276,16 @@ public void clearReasonerAndKnowledgeBase() { */ @Override public void close() { - closeReasoner(); + this.closeReasoner(); } /** * Closes and discards the internal {@link Reasoner}. 
*/ private void closeReasoner() { - if (reasoner != null) { - reasoner.close(); - reasoner = null; + if (this.reasoner != null) { + this.reasoner.close(); + this.reasoner = null; } } From 1c69d5ce6559d7df1a22f39fddf4d9ead06ec524 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 1 Sep 2020 22:46:37 +0200 Subject: [PATCH 0799/1003] delete test output file binaryFacts.csv --- rulewerk-vlog/src/test/data/output/binaryFacts.csv | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 rulewerk-vlog/src/test/data/output/binaryFacts.csv diff --git a/rulewerk-vlog/src/test/data/output/binaryFacts.csv b/rulewerk-vlog/src/test/data/output/binaryFacts.csv deleted file mode 100644 index bcaabc2bc..000000000 --- a/rulewerk-vlog/src/test/data/output/binaryFacts.csv +++ /dev/null @@ -1,2 +0,0 @@ -c1,c2 -c3,c4 From bdf89b0fddcdebe198f5477a5235ef83268892a1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 1 Sep 2020 22:47:20 +0200 Subject: [PATCH 0800/1003] delete unit test output file --- rulewerk-vlog/src/test/data/output/exclude_blanks.csv | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 rulewerk-vlog/src/test/data/output/exclude_blanks.csv diff --git a/rulewerk-vlog/src/test/data/output/exclude_blanks.csv b/rulewerk-vlog/src/test/data/output/exclude_blanks.csv deleted file mode 100644 index e69de29bb..000000000 From fdefff5db5cf91cbaf98f7fd7084b32372b278c4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 1 Sep 2020 22:47:38 +0200 Subject: [PATCH 0801/1003] delete unit test output file --- rulewerk-vlog/src/test/data/output/include_blanks.csv | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 rulewerk-vlog/src/test/data/output/include_blanks.csv diff --git a/rulewerk-vlog/src/test/data/output/include_blanks.csv b/rulewerk-vlog/src/test/data/output/include_blanks.csv deleted file mode 100644 index e502cf529..000000000 --- a/rulewerk-vlog/src/test/data/output/include_blanks.csv +++ /dev/null @@ -1,2 +0,0 @@ -c,1_2_0 -c,1_3_0 From 3969b5868f78841e74914bdf49a36e59e2d2b3b2 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 1 Sep 2020 22:47:59 +0200 Subject: [PATCH 0802/1003] delete unit test output file --- rulewerk-vlog/src/test/data/output/unaryFacts.csv | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 rulewerk-vlog/src/test/data/output/unaryFacts.csv diff --git a/rulewerk-vlog/src/test/data/output/unaryFacts.csv b/rulewerk-vlog/src/test/data/output/unaryFacts.csv deleted file mode 100644 index d0aaf976a..000000000 --- a/rulewerk-vlog/src/test/data/output/unaryFacts.csv +++ /dev/null @@ -1,2 +0,0 @@ -c1 -c2 From 8f54de9ed624413e16727b4455715e6d08b4ad9b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 2 Sep 2020 20:04:17 +0200 Subject: [PATCH 0803/1003] allow duplicate imports --- .../semanticweb/rulewerk/core/reasoner/KnowledgeBase.java | 8 ++++---- .../org/semanticweb/rulewerk/parser/RuleParserTest.java | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 6936811e7..a7bd76fc2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -490,10 +490,10 @@ public void importRulesFile(File file, AdditionalInputParser parseFunction) Validate.notNull(file, "file must not be null"); boolean isNewFile = 
this.importedFilePaths.add(file.getCanonicalPath()); - Validate.isTrue(isNewFile, "file \"" + file.getName() + "\" was already imported."); - - try (InputStream stream = new FileInputStream(file)) { - parseFunction.parseInto(stream, this); + if (isNewFile) { + try (InputStream stream = new FileInputStream(file)) { + parseFunction.parseInto(stream, this); + } } } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index a7b3be68a..3b3b77b60 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -536,15 +536,15 @@ public void parse_reusedNamedNulls_identical() throws ParsingException { assertArgumentIsNamedNull(fact3, 1); } - @Test(expected = ParsingException.class) - public void parseInto_duplicateImportStatements_throws() throws ParsingException { + @Test + public void parseInto_duplicateImportStatements_succeeds() throws ParsingException { String input = "@import \"src/test/resources/facts.rls\" . "; KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); } - @Test(expected = ParsingException.class) - public void parseInto_duplicateRelativeImportStatements_throws() throws ParsingException { + @Test + public void parseInto_duplicateRelativeImportStatements_succeeds() throws ParsingException { String input = "@import \"src/test/resources/facts.rls\" . @import-relative \"src/test/resources/facts.rls\" ."; KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); From 2cbc5022710f030f9ce1c326951aed62ea162faa Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 2 Sep 2020 20:08:50 +0200 Subject: [PATCH 0804/1003] set reasoner log level to Error --- .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index daecfcddd..c14dc1c4c 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -42,6 +42,7 @@ import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; @@ -264,6 +265,7 @@ public InputStream getFileInputStream(final String fileName) throws FileNotFound public void clearReasonerAndKnowledgeBase() { this.closeReasoner(); this.reasoner = this.reasonerProvider.reasoner(this.knowledgeBaseProvider.knowledgeBase()); + this.reasoner.setLogLevel(LogLevel.ERROR); try { this.reasoner.reason(); } catch (final IOException e) { From 5ed733eff4974799b4b554b8a8f2eaecb54ac4c5 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 Sep 2020 20:12:54 +0200 Subject: [PATCH 0805/1003] added javadoc --- .../rulewerk/client/picocli/Main.java | 57 ++++++++++++++----- .../client/picocli/RulewerkClient.java | 46 --------------- .../shell/DefaultShellConfiguration.java | 7 +++ 
.../client/shell/InteractiveShellClient.java | 20 ++++++- .../rulewerk/client/shell/Shell.java | 7 ++- .../client/shell/ShellConfiguration.java | 28 +++++++++ .../client/shell/TerminalStyledPrinter.java | 42 +++++++++----- .../commands/ExitCommandInterpreter.java | 17 ++++++ .../shell/InteractiveShellClientTest.java | 2 +- .../rulewerk/commands/StyledPrinter.java | 10 ++++ 10 files changed, 156 insertions(+), 80 deletions(-) delete mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index 0772d76e7..f8b59bb0a 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -21,6 +21,7 @@ */ import java.io.IOException; +import java.io.PrintStream; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; @@ -33,8 +34,8 @@ import picocli.CommandLine.Command; /** - * Dummy class with main method that is a command with subcommands shell and - * materialize + * Class with main method that is a command with subcommands {@code shell} + * (default) and {@code materialize}. * * @author Irina Dragoste * @@ -43,28 +44,56 @@ RulewerkClientMaterialize.class }) public class Main { + public static String INTERACTIVE_SHELL_COMMAND = "shell"; + public static String COMMAND_LINE_CLIENT_COMMAND = "materialize"; + public static String HELP_COMMAND = "help"; + + /** + * Launches the client application for Rulewerk. The functionality depends on + * the given command-line args ({@code args}): + *

      + *
+	 * <ul>
+	 * <li>empty args ("") or argument "shell" launch an
+	 * interactive shell.</li>
+	 * <li>argument "materialize" can be used with different options to complete
+	 * several materialization and querying tasks from the command line.</li>
+	 * <li>help</li>
+	 * </ul>
  • + * + * @param args + * + * @throws IOException + */ public static void main(final String[] args) throws IOException { configureLogging(); - - if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { - new InteractiveShellClient().run(new DefaultShellConfiguration()); + + if (args.length == 0 || (args.length > 0 && INTERACTIVE_SHELL_COMMAND.equals(args[0]))) { + new InteractiveShellClient().launchShell(new DefaultShellConfiguration()); } else { - if (args[0].equals("materialize")) { + if (COMMAND_LINE_CLIENT_COMMAND.equals(args[0])) { final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); commandline.execute(args); } else { - if (!args[0].equals("help")) { - System.out.println("Invalid command."); - } - // TODO improve help - // TODO do we need to create a Help command? - (new CommandLine(new Main())).usage(System.out); - + displayHelp(args, System.out); } } + } + static void displayHelp(final String[] args, final PrintStream printStream) { + if (!HELP_COMMAND.equals(args[0])) { + printStream.println("Invalid command."); + } + + if (HELP_COMMAND.equals(args[0]) && args.length > 1 && COMMAND_LINE_CLIENT_COMMAND.equals(args[1])) { + (new CommandLine(new RulewerkClientMaterialize())).usage(printStream); + } else { + (new CommandLine(new Main())).usage(printStream); + } } - + + /** + * Configures {@link Logger} settings. Messages are logged to the console. Log + * level is set to {@link Level.FATAL}. + */ public static void configureLogging() { // Create the appender that will write log messages to the console. final ConsoleAppender consoleAppender = new ConsoleAppender(); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java deleted file mode 100644 index ee48b9beb..000000000 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java +++ /dev/null @@ -1,46 +0,0 @@ -package org.semanticweb.rulewerk.client.picocli; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import picocli.CommandLine; - -import picocli.CommandLine.Command; - -/** - * Stand alone client for Rulewerk. 
- * - * @author Larry Gonzalez - * - */ -@Command(name = "java -jar RulewerkClient.jar", description = "RulewerkClient: A command line client for Rulewerk.", subcommands = { - RulewerkClientMaterialize.class }) -public class RulewerkClient implements Runnable { - - public static void main(String[] args) { - CommandLine commandline = new CommandLine(new RulewerkClient()); - commandline.execute(args); - } - - @Override - public void run() { - (new CommandLine(new RulewerkClient())).usage(System.out); - } -} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java index d108ea6c0..bc93de9c6 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -36,6 +36,13 @@ import org.jline.utils.AttributedString; import org.jline.utils.AttributedStyle; +/** + * An implementation of {@link ShellConfiguration} with custom styling and + * completion for recognized commands. + * + * @author Irina Dragoste + * + */ public class DefaultShellConfiguration implements ShellConfiguration { public static final String PROMPT_STRING = "rulewerk> "; diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java index cd62e53a5..e74a7a189 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java @@ -31,18 +31,32 @@ import picocli.CommandLine.Command; -@Command(name = "shell", description = "An interactive shell for Rulewerk. The default command.") +/** + * Class for executing the default {@code shell} command, which launches an + * interactive shell. + * + * @author Irina Dragoste + * + */ +@Command(name = "shell", description = "Launch an interactive shell for Rulewerk. The default command.") public class InteractiveShellClient { - public void run(final ShellConfiguration configuration) throws IOException { + /** + * Builds and launches an interactive shell, which accepts commands for running + * Rulewerk tasks using VLog Reasosner. + * + * @param configuration for shell I/O resources + * @throws IOException if {@link Terminal} cannot be built. 
+ */ + public void launchShell(final ShellConfiguration configuration) throws IOException { final Terminal terminal = configuration.buildTerminal(); try (Interpreter interpreter = this.initializeInterpreter(terminal)) { final Shell shell = new Shell(interpreter); - final LineReader lineReader = configuration.buildLineReader(terminal, shell.getRegisteredCommands()); + final LineReader lineReader = configuration.buildLineReader(terminal, shell.getCommands()); final String prompt = configuration.buildPrompt(terminal); shell.run(lineReader, prompt); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 665a0a1df..103d6abb1 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -151,7 +151,12 @@ boolean isRunning() { return this.running; } - public Set getRegisteredCommands() { + /** + * Getter for the shell commands. + * + * @return the names of the commands that are recognized by this shell. + */ + public Set getCommands() { return this.interpreter.getRegisteredCommands(); } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java index 9c83ae976..fc9e42e04 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java @@ -26,12 +26,40 @@ import org.jline.reader.LineReader; import org.jline.terminal.Terminal; +/** + * Interface for providing I/O resources for an interactive shell: terminal, + * terminal prompt, and line reader + * + * @author Irina Dragoste + * + */ public interface ShellConfiguration { + /** + * Provides a line reader that reads user input from the given terminal. The + * line reader offers tab-completion for the given list of command names. + * + * @param terminal terminal to read from. + * @param commands list of command names recognized by the interactive shell. + * @return a line reader for interacting with the shell terminal. + */ LineReader buildLineReader(Terminal terminal, Collection commands); + /** + * Provides an I/O terminal for the interactive shell. + * + * @return the interactive shell terminal. + * @throws IOException when the terminal cannot be built + */ Terminal buildTerminal() throws IOException; + /** + * Provides the prompt text (with colour and style) to be displayed on the given + * terminal. + * + * @param terminal terminal for the prompt to be displayed on + * @return the prompt text with embedded style. + */ String buildPrompt(Terminal terminal); } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java index d481f2c3a..4bf7e91ea 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java @@ -27,47 +27,59 @@ import org.jline.utils.AttributedStyle; import org.semanticweb.rulewerk.commands.StyledPrinter; +/** + * StyledPrinter that uses the {@link PrintWriter} of a {@link Terminal} and has + * various styling. 
+ * + * @author Irina Dragoste + * + */ public class TerminalStyledPrinter implements StyledPrinter { final Terminal terminal; + /** + * Constructor providing a terminal for the StyledPrinter to write to. + * + * @param terminal the terminal to write to + */ public TerminalStyledPrinter(final Terminal terminal) { this.terminal = terminal; } @Override - public void printNormal(String string) { - printStyled(string, AttributedStyle.DEFAULT); + public void printNormal(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT); } @Override - public void printSection(String string) { - printStyled(string, AttributedStyle.DEFAULT.bold()); + public void printSection(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.bold()); } @Override - public void printEmph(String string) { - printStyled(string, AttributedStyle.DEFAULT.bold()); + public void printEmph(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.bold()); } @Override - public void printCode(String string) { - printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW).bold()); + public void printCode(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW).bold()); } @Override - public void printImportant(String string) { - printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.RED)); + public void printImportant(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.RED)); } @Override public PrintWriter getWriter() { - return terminal.writer(); + return this.terminal.writer(); } - private void printStyled(String string, AttributedStyle attributedStyle) { - AttributedString attributedString = new AttributedString(string, attributedStyle); - getWriter().print(attributedString.toAnsi(terminal)); - getWriter().flush(); + private void printStyled(final String string, final AttributedStyle attributedStyle) { + final AttributedString attributedString = new AttributedString(string, attributedStyle); + this.getWriter().print(attributedString.toAnsi(this.terminal)); + this.getWriter().flush(); } } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index 706275678..b6506b81b 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -28,16 +28,33 @@ import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; +/** + * Interpreter for the command to exit an interactive shell + * + * @author Irina Dragoste + * + */ public class ExitCommandInterpreter implements CommandInterpreter { public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>(0)); + /** + * Command names used for requesting exiting an interactive shell + * + * @author Irina Dragoste + * + */ public static enum ExitCommandName { exit; } final Shell shell; + /** + * Constructor that provides the interactive shell from which exit is requested + * + * @param shell interactive shell to exit from + */ public ExitCommandInterpreter(final Shell shell) { this.shell = shell; } diff --git 
a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java index e37722070..5d328751f 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java @@ -69,7 +69,7 @@ public void run_mockConfiguration() throws IOException { .thenReturn(lineReader); final InteractiveShellClient shellClient = new InteractiveShellClient(); - shellClient.run(configuration); + shellClient.launchShell(configuration); assertTrue(output.toString().contains("Welcome to the Rulewerk interactive shell.")); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java index 8e642b597..ebaf2867d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java @@ -22,6 +22,12 @@ import java.io.Writer; +/** + * Interface for printing given Strings to a writer using different styles. + * + * @author Irina Dragoste + * + */ public interface StyledPrinter { void printNormal(String string); @@ -34,6 +40,10 @@ public interface StyledPrinter { void printImportant(String string); + /** + * + * @return the writer to print to + */ Writer getWriter(); } From 765b08c06c333c07270a4bc448dbea7c80b8a590 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 2 Sep 2020 21:18:26 +0200 Subject: [PATCH 0806/1003] better error reporting --- .../semanticweb/rulewerk/commands/Interpreter.java | 12 ++++++++---- .../commands/LoadCommandInterpreterTest.java | 9 +++++++++ 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index c14dc1c4c..1be04b71b 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -215,7 +215,7 @@ public static String extractStringArgument(final Command command, final int inde try { return Terms.extractString(command.getArguments().get(index).fromTerm() .orElseThrow(() -> getArgumentTypeError(index, "string", parameterName))); - } catch (final IllegalArgumentException e) { + } catch (final IllegalArgumentException | IndexOutOfBoundsException e) { throw getArgumentTypeError(index, "string", parameterName); } } @@ -225,15 +225,19 @@ public static String extractNameArgument(final Command command, final int index, try { return Terms.extractName(command.getArguments().get(index).fromTerm() .orElseThrow(() -> getArgumentTypeError(index, "constant", parameterName))); - } catch (final IllegalArgumentException e) { + } catch (final IllegalArgumentException | IndexOutOfBoundsException e) { throw getArgumentTypeError(index, "constant", parameterName); } } public static PositiveLiteral extractPositiveLiteralArgument(final Command command, final int index, final String parameterName) throws CommandExecutionException { - return command.getArguments().get(index).fromPositiveLiteral() - .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); + try { + return 
command.getArguments().get(index).fromPositiveLiteral() + .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); + } catch (final IndexOutOfBoundsException e) { + throw getArgumentTypeError(index, "constant", parameterName); + } } /** diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index ae4d121a2..ee3d1ac42 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -296,6 +296,15 @@ public void wrongArgumentCount_fails() throws ParsingException, CommandExecution Command command = interpreter.parseCommand("@load ."); interpreter.runCommand(command); } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountWithOptional_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load OWL ."); + interpreter.runCommand(command); + } @Test(expected = CommandExecutionException.class) public void wrongRdfPredicateTermType_fails() throws ParsingException, CommandExecutionException { From a07d3b9bc07ba3d86b7bcd4c9676662394eab534 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 Sep 2020 21:47:22 +0200 Subject: [PATCH 0807/1003] completers for @load, @export and @clear --- .../shell/DefaultShellConfiguration.java | 40 +++++++++---- .../commands/ClearCommandInterpreter.java | 24 ++++---- .../commands/ExportCommandInterpreter.java | 28 ++++----- .../commands/LoadCommandInterpreter.java | 60 +++++++++---------- 4 files changed, 86 insertions(+), 66 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java index bc93de9c6..f893512f7 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -26,8 +26,8 @@ import java.util.List; import org.jline.builtins.Completers; -import org.jline.builtins.Completers.FileNameCompleter; import org.jline.builtins.Completers.TreeCompleter; +import org.jline.builtins.Completers.TreeCompleter.Node; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; import org.jline.reader.impl.completer.StringsCompleter; @@ -35,6 +35,9 @@ import org.jline.terminal.TerminalBuilder; import org.jline.utils.AttributedString; import org.jline.utils.AttributedStyle; +import org.semanticweb.rulewerk.commands.ClearCommandInterpreter; +import org.semanticweb.rulewerk.commands.ExportCommandInterpreter; +import org.semanticweb.rulewerk.commands.LoadCommandInterpreter; /** * An implementation of {@link ShellConfiguration} with custom styling and @@ -69,21 +72,38 @@ LineReaderBuilder getDefaultLineReaderConfiguration(final LineReaderBuilder line } TreeCompleter buildCompleter(final Collection registeredCommands) { -// @load and @export commands require a file name as argument - final FileNameCompleter fileNameCompleter = new Completers.FileNameCompleter(); + final Node fileNameCompleterNode = TreeCompleter.node(new 
Completers.FileNameCompleter()); final List nodes = new ArrayList<>(); - registeredCommands.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { - if (serializedCommandName.equals("@load")) { - nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); - } else if (serializedCommandName.equals("@help")) { - nodes.add(TreeCompleter.node(serializedCommandName, - TreeCompleter.node(new StringsCompleter(registeredCommands)))); + registeredCommands.stream().map(command -> "@" + command).forEach(commandName -> { + if (commandName.equals("@load")) { + nodes.add(TreeCompleter.node(commandName, fileNameCompleterNode)); + + final StringsCompleter taskOptionsCompleter = new StringsCompleter(LoadCommandInterpreter.TASK_OWL, + LoadCommandInterpreter.TASK_RDF, LoadCommandInterpreter.TASK_RLS); + nodes.add(TreeCompleter.node(commandName, + TreeCompleter.node(taskOptionsCompleter, fileNameCompleterNode))); + } else if (commandName.equals("@export")) { + final StringsCompleter taskOptionsCompleter = new StringsCompleter( + ExportCommandInterpreter.TASK_INFERENCES, ExportCommandInterpreter.TASK_KB + ); + nodes.add(TreeCompleter.node(commandName, + TreeCompleter.node(taskOptionsCompleter, fileNameCompleterNode))); + } else if (commandName.equals("@clear")) { + final StringsCompleter taskOptionsCompleter = new StringsCompleter(ClearCommandInterpreter.TASK_ALL, + ClearCommandInterpreter.TASK_INFERENCES, ClearCommandInterpreter.TASK_FACTS, + ClearCommandInterpreter.TASK_PREFIXES, ClearCommandInterpreter.TASK_RULES, + ClearCommandInterpreter.TASK_SOURCES); + nodes.add(TreeCompleter.node(commandName, TreeCompleter.node(taskOptionsCompleter))); + } else if (commandName.equals("@help")) { + nodes.add( + TreeCompleter.node(commandName, TreeCompleter.node(new StringsCompleter(registeredCommands)))); } else { - nodes.add(TreeCompleter.node(serializedCommandName)); + nodes.add(TreeCompleter.node(commandName)); } }); return new TreeCompleter(nodes); + } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index b27bda588..3c70b9744 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -27,17 +27,17 @@ public class ClearCommandInterpreter implements CommandInterpreter { - static final String TASK_ALL = "ALL"; - static final String TASK_INFERENCES = "INF"; - static final String TASK_FACTS = "FACTS"; - static final String TASK_RULES = "RULES"; - static final String TASK_SOURCES = "DATASOURCES"; - static final String TASK_PREFIXES = "PREFIXES"; + public static final String TASK_ALL = "ALL"; + public static final String TASK_INFERENCES = "INF"; + public static final String TASK_FACTS = "FACTS"; + public static final String TASK_RULES = "RULES"; + public static final String TASK_SOURCES = "DATASOURCES"; + public static final String TASK_PREFIXES = "PREFIXES"; @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { Interpreter.validateArgumentCount(command, 1); - String task = Interpreter.extractNameArgument(command, 0, "task"); + final String task = Interpreter.extractNameArgument(command, 0, 
"task"); if (TASK_ALL.equals(task)) { interpreter.clearReasonerAndKnowledgeBase(); interpreter.printNormal("Knowledge base has been cleared; reasoner has been completely reset.\n"); @@ -45,17 +45,17 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio interpreter.getReasoner().resetReasoner(); interpreter.printNormal("Reasoner has been reset.\n"); } else if (TASK_FACTS.equals(task)) { - for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { + for (final Fact fact : interpreter.getKnowledgeBase().getFacts()) { interpreter.getKnowledgeBase().removeStatement(fact); } interpreter.printNormal("All facts have been removed from the knowledge base.\n"); } else if (TASK_RULES.equals(task)) { - for (Rule rule : interpreter.getKnowledgeBase().getRules()) { + for (final Rule rule : interpreter.getKnowledgeBase().getRules()) { interpreter.getKnowledgeBase().removeStatement(rule); } interpreter.printNormal("All rules have been removed from the knowledge base.\n"); } else if (TASK_SOURCES.equals(task)) { - for (DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() + for (final DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() .getDataSourceDeclarations()) { interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); } @@ -71,7 +71,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public void printHelp(String commandName, Interpreter interpreter) { + public void printHelp(final String commandName, final Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " TASK\n" // + " TASK: what to reset, possuble values:\n" // + " ALL: empty knowledge base and completely reset reasoner\n" // diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java index 4ead3798d..3cffcf25f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -29,20 +29,20 @@ public class ExportCommandInterpreter implements CommandInterpreter { - static final String TASK_KB = "KB"; - static final String TASK_INFERENCES = "INFERENCES"; + public static final String TASK_KB = "KB"; + public static final String TASK_INFERENCES = "INFERENCES"; @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { Interpreter.validateArgumentCount(command, 2); - String task = Interpreter.extractNameArgument(command, 0, "task"); - String fileName = Interpreter.extractStringArgument(command, 1, "filename"); + final String task = Interpreter.extractNameArgument(command, 0, "task"); + final String fileName = Interpreter.extractStringArgument(command, 1, "filename"); if (TASK_KB.equals(task)) { - exportKb(interpreter, fileName); + this.exportKb(interpreter, fileName); } else if (TASK_INFERENCES.equals(task)) { - exportInferences(interpreter, fileName); + this.exportInferences(interpreter, fileName); } else { throw new CommandExecutionException( "Unknown task " + task + ". 
Should be " + TASK_KB + " or " + TASK_INFERENCES); @@ -51,7 +51,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public void printHelp(String commandName, Interpreter interpreter) { + public void printHelp(final String commandName, final Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " TASK \"filename\" .\n" // + " TASK: what to export; can be KB or INFERENCES\n" // + " \"filename\": string path export file (suggested extension: .rls)\n"); @@ -62,14 +62,14 @@ public String getSynopsis() { return "export knowledgebase or inferences to a Rulewerk file"; } - private void exportInferences(Interpreter interpreter, String fileName) throws CommandExecutionException { - Timer timer = new Timer("export"); + private void exportInferences(final Interpreter interpreter, final String fileName) throws CommandExecutionException { + final Timer timer = new Timer("export"); Correctness correctness; try (Writer writer = interpreter.getFileWriter(fileName)) { timer.start(); correctness = interpreter.getReasoner().writeInferences(writer); timer.stop(); - } catch (IOException e) { + } catch (final IOException e) { throw new CommandExecutionException(e.getMessage(), e); } @@ -78,13 +78,13 @@ private void exportInferences(Interpreter interpreter, String fileName) throws C interpreter.printNormal(" This result is " + correctness + ".\n"); } - private void exportKb(Interpreter interpreter, String fileName) throws CommandExecutionException { - Timer timer = new Timer("export"); + private void exportKb(final Interpreter interpreter, final String fileName) throws CommandExecutionException { + final Timer timer = new Timer("export"); try (Writer writer = interpreter.getFileWriter(fileName)) { timer.start(); interpreter.getKnowledgeBase().writeKnowledgeBase(writer); timer.stop(); - } catch (IOException e) { + } catch (final IOException e) { throw new CommandExecutionException(e.getMessage(), e); } interpreter.printNormal("Exported knowledge base in " + timer.getTotalWallTime() / 1000000 + "ms (" diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 952f3060c..1fe89e2e3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -57,14 +57,14 @@ */ public class LoadCommandInterpreter implements CommandInterpreter { - static final String TASK_RLS = "RULES"; - static final String TASK_OWL = "OWL"; - static final String TASK_RDF = "RDF"; + public static final String TASK_RLS = "RULES"; + public static final String TASK_OWL = "OWL"; + public static final String TASK_RDF = "RDF"; static final String PREDICATE_ABOX = "ABOX"; @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { String task; int pos = 0; if (command.getArguments().size() > 0 && command.getArguments().get(0).fromTerm().isPresent() @@ -75,7 +75,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio task = TASK_RLS; } - String fileName = Interpreter.extractStringArgument(command, pos, "filename"); + final String fileName = Interpreter.extractStringArgument(command, pos, "filename"); pos++; String 
rdfTriplePredicate = RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; @@ -94,16 +94,16 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio Interpreter.validateArgumentCount(command, pos); - int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); - int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); - int countDataSourceDeclarationsBefore = interpreter.getKnowledgeBase().getDataSourceDeclarations().size(); + final int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); + final int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); + final int countDataSourceDeclarationsBefore = interpreter.getKnowledgeBase().getDataSourceDeclarations().size(); if (TASK_RLS.equals(task)) { - loadKb(interpreter, fileName); + this.loadKb(interpreter, fileName); } else if (TASK_OWL.equals(task)) { - loadOwl(interpreter, fileName); + this.loadOwl(interpreter, fileName); } else if (TASK_RDF.equals(task)) { - loadRdf(interpreter, fileName, rdfTriplePredicate); + this.loadRdf(interpreter, fileName, rdfTriplePredicate); } else { throw new CommandExecutionException( "Unknown task " + task + ". Should be one of " + TASK_RLS + ", " + TASK_OWL + ", " + TASK_RDF); @@ -117,23 +117,23 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } - private void loadKb(Interpreter interpreter, String fileName) throws CommandExecutionException { + private void loadKb(final Interpreter interpreter, final String fileName) throws CommandExecutionException { try { - InputStream inputStream = interpreter.getFileInputStream(fileName); + final InputStream inputStream = interpreter.getFileInputStream(fileName); RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream); - } catch (FileNotFoundException e) { + } catch (final FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); - } catch (ParsingException e) { + } catch (final ParsingException e) { throw new CommandExecutionException("Failed to parse Rulewerk file: " + e.getMessage(), e); } } - private void loadOwl(Interpreter interpreter, String fileName) throws CommandExecutionException { + private void loadOwl(final Interpreter interpreter, final String fileName) throws CommandExecutionException { final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); OWLOntology ontology; try { ontology = ontologyManager.loadOntologyFromOntologyDocument(new File(fileName)); - } catch (OWLOntologyCreationException e) { + } catch (final OWLOntologyCreationException e) { throw new CommandExecutionException("Problem loading OWL ontology: " + e.getMessage(), e); } interpreter.printNormal( @@ -157,20 +157,20 @@ private void loadOwl(Interpreter interpreter, String fileName) throws CommandExe interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); } - private void loadRdf(Interpreter interpreter, String fileName, String triplePredicateName) + private void loadRdf(final Interpreter interpreter, final String fileName, final String triplePredicateName) throws CommandExecutionException { try { - String baseIri = new File(fileName).toURI().toString(); + final String baseIri = new File(fileName).toURI().toString(); - Iterator formatsToTry = Arrays.asList(RDFFormat.NTRIPLES, RDFFormat.TURTLE, RDFFormat.RDFXML) - .iterator(); + final Iterator formatsToTry = Arrays + .asList(RDFFormat.NTRIPLES, RDFFormat.TURTLE, RDFFormat.RDFXML).iterator(); Model model = null; - List parseErrors = new ArrayList<>(); + final 
List parseErrors = new ArrayList<>(); while (model == null && formatsToTry.hasNext()) { - RDFFormat rdfFormat = formatsToTry.next(); + final RDFFormat rdfFormat = formatsToTry.next(); try { - InputStream inputStream = interpreter.getFileInputStream(fileName); - model = parseRdfFromStream(inputStream, rdfFormat, baseIri); + final InputStream inputStream = interpreter.getFileInputStream(fileName); + model = this.parseRdfFromStream(inputStream, rdfFormat, baseIri); interpreter.printNormal("Found RDF document in format " + rdfFormat.getName() + " ...\n"); } catch (RDFParseException | RDFHandlerException e) { parseErrors.add("Failed to parse as " + rdfFormat.getName() + ": " + e.getMessage()); @@ -178,20 +178,20 @@ private void loadRdf(Interpreter interpreter, String fileName, String triplePred } if (model == null) { String message = "Failed to parse RDF input:"; - for (String error : parseErrors) { + for (final String error : parseErrors) { message += "\n " + error; } throw new CommandExecutionException(message); } - RdfModelConverter rdfModelConverter = new RdfModelConverter(true, triplePredicateName); + final RdfModelConverter rdfModelConverter = new RdfModelConverter(true, triplePredicateName); rdfModelConverter.addAll(interpreter.getKnowledgeBase(), model); - } catch (IOException e) { + } catch (final IOException e) { throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); } } - private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, String baseIri) + private Model parseRdfFromStream(final InputStream inputStream, final RDFFormat rdfFormat, final String baseIri) throws RDFParseException, RDFHandlerException, IOException { final Model model = new LinkedHashModel(); final RDFParser rdfParser = Rio.createParser(rdfFormat); @@ -201,7 +201,7 @@ private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, S } @Override - public void printHelp(String commandName, Interpreter interpreter) { + public void printHelp(final String commandName, final Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " [TASK] \"file\" [RDF predicate]\n" // + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // From 43f4826d899cd6572f2509a71e6f2bb945a16d94 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 Sep 2020 22:02:49 +0200 Subject: [PATCH 0808/1003] correct usage message --- .../commands/ExitCommandInterpreter.java | 2 +- .../commands/AddSourceCommandInterpreter.java | 188 ++++++------ .../commands/ClearCommandInterpreter.java | 2 +- .../commands/LoadCommandInterpreter.java | 2 +- .../RemoveSourceCommandInterpreter.java | 16 +- .../commands/RetractCommandInterpreter.java | 150 ++++----- .../commands/SetPrefixCommandInterpreter.java | 12 +- .../RetractCommandInterpreterTest.java | 288 +++++++++--------- 8 files changed, 330 insertions(+), 330 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index b6506b81b..f9214afae 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -61,7 +61,7 @@ public ExitCommandInterpreter(final Shell shell) { @Override public void printHelp(final String 
commandName, final Interpreter interpreter) { - interpreter.printNormal("Usage: " + commandName + ".\n"); + interpreter.printNormal("Usage: @" + commandName + ".\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index bf7e2aad8..004023a65 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -1,94 +1,94 @@ -package org.semanticweb.rulewerk.commands; - -/*- - * #%L - * Rulewerk command execution support - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.model.api.DataSource; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.parser.ParsingException; - -public class AddSourceCommandInterpreter implements CommandInterpreter { - - @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - Interpreter.validateArgumentCount(command, 2); - String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); - PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, - "source declaration"); - - Predicate predicate = extractPredicate(predicateDeclaration); - DataSource dataSource = extractDataSource(sourceDeclaration, interpreter); - - if (dataSource.getRequiredArity().isPresent()) { - Integer requiredArity = dataSource.getRequiredArity().get(); - if (predicate.getArity() != requiredArity) { - throw new CommandExecutionException("Invalid arity " + predicate.getArity() + " for data source, " - + "expected " + requiredArity + "."); - } - } - - interpreter.getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); - } - - @Override - public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " []: .\n" - + " [] : the name of the predicate and its arity\n" - + " : a fact specifying a source declaration\n\n" - + "Note that every predicate can have multiple sources.\n"); - } - - @Override - public String getSynopsis() { - return "define a new external data source for a predicate"; - } - - static Predicate extractPredicate(String predicateDeclaration) throws CommandExecutionException { - String predicateName; - int arity; - try { - int openBracket = predicateDeclaration.indexOf('['); - int closeBracket = 
predicateDeclaration.indexOf(']'); - predicateName = predicateDeclaration.substring(0, openBracket); - String arityString = predicateDeclaration.substring(openBracket + 1, closeBracket); - arity = Integer.parseInt(arityString); - } catch (IndexOutOfBoundsException | NumberFormatException e) { - throw new CommandExecutionException( - "Predicate declaration must have the format \"predicateName[number]\" but was \"" - + predicateDeclaration + "\"."); - } - return Expressions.makePredicate(predicateName, arity); - } - - static DataSource extractDataSource(PositiveLiteral sourceDeclaration, Interpreter interpreter) - throws CommandExecutionException { - try { - return interpreter.getParserConfiguration() - .parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration); - } catch (ParsingException e) { - throw new CommandExecutionException("Could not parse source declaration: " + e.getMessage()); - } - } - -} +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class AddSourceCommandInterpreter implements CommandInterpreter { + + @Override + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 2); + final String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + final PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, + "source declaration"); + + final Predicate predicate = extractPredicate(predicateDeclaration); + final DataSource dataSource = extractDataSource(sourceDeclaration, interpreter); + + if (dataSource.getRequiredArity().isPresent()) { + final Integer requiredArity = dataSource.getRequiredArity().get(); + if (predicate.getArity() != requiredArity) { + throw new CommandExecutionException("Invalid arity " + predicate.getArity() + " for data source, " + + "expected " + requiredArity + "."); + } + } + + interpreter.getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + } + + @Override + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " []: .\n" + + " [] : the name of the predicate and its arity\n" + + " : a fact specifying a source declaration\n\n" + + "Note that every predicate can have multiple sources.\n"); + } + + @Override + public String 
getSynopsis() { + return "define a new external data source for a predicate"; + } + + static Predicate extractPredicate(final String predicateDeclaration) throws CommandExecutionException { + String predicateName; + int arity; + try { + final int openBracket = predicateDeclaration.indexOf('['); + final int closeBracket = predicateDeclaration.indexOf(']'); + predicateName = predicateDeclaration.substring(0, openBracket); + final String arityString = predicateDeclaration.substring(openBracket + 1, closeBracket); + arity = Integer.parseInt(arityString); + } catch (IndexOutOfBoundsException | NumberFormatException e) { + throw new CommandExecutionException( + "Predicate declaration must have the format \"predicateName[number]\" but was \"" + + predicateDeclaration + "\"."); + } + return Expressions.makePredicate(predicateName, arity); + } + + static DataSource extractDataSource(final PositiveLiteral sourceDeclaration, final Interpreter interpreter) + throws CommandExecutionException { + try { + return interpreter.getParserConfiguration() + .parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration); + } catch (final ParsingException e) { + throw new CommandExecutionException("Could not parse source declaration: " + e.getMessage()); + } + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index 3c70b9744..484d90c41 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -72,7 +72,7 @@ public void run(final Command command, final Interpreter interpreter) throws Com @Override public void printHelp(final String commandName, final Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " TASK\n" // + interpreter.printNormal("Usage: @" + commandName + " TASK .\n" // + " TASK: what to reset, possuble values:\n" // + " ALL: empty knowledge base and completely reset reasoner\n" // + " INF: reset reasoner to clear all loaded data and inferences\n" // diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 1fe89e2e3..c0a223524 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -202,7 +202,7 @@ private Model parseRdfFromStream(final InputStream inputStream, final RDFFormat @Override public void printHelp(final String commandName, final Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " [TASK] \"file\" [RDF predicate]\n" // + interpreter.printNormal("Usage: @" + commandName + " [TASK] \"file\" [RDF predicate] .\n" // + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // + " OWL to load an OWL ontology and convert it to facts and rules\n" // diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java index 9a7c606a4..c835bd635 100644 --- 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -30,22 +30,22 @@ public class RemoveSourceCommandInterpreter implements CommandInterpreter { @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { if (command.getArguments().size() == 0 || command.getArguments().size() > 2) { throw new CommandExecutionException("This command requires one or two arguments."); } - String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); - Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); + final String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + final Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); DataSource dataSource = null; if (command.getArguments().size() == 2) { - PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, + final PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, "source declaration"); dataSource = AddSourceCommandInterpreter.extractDataSource(sourceDeclaration, interpreter); } if (dataSource != null) { - DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); if (interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration) > 0) { interpreter.printNormal("Removed specified data source declaration.\n"); } else { @@ -53,7 +53,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } else { int count = 0; - for (DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() + for (final DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() .getDataSourceDeclarations()) { if (dataSourceDeclaration.getPredicate().equals(predicate)) { interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); @@ -66,8 +66,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " []: .\n" + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " []: .\n" + " [] : the name of the predicate and its arity\n" + " (optional): a fact specifying a source declaration\n\n" + "Note that every predicate can have multiple sources.\n"); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java index 2e74580c8..6add109ca 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -1,75 +1,75 @@ -package org.semanticweb.rulewerk.commands; - -/*- - * #%L - * Rulewerk command execution support - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the 
Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.rulewerk.core.model.api.Argument; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; - -public class RetractCommandInterpreter implements CommandInterpreter { - - @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - int factCount = 0; - int ruleCount = 0; - for (Argument argument : command.getArguments()) { - if (argument.fromPositiveLiteral().isPresent()) { - PositiveLiteral literal = argument.fromPositiveLiteral().get(); - Fact fact; - try { - fact = Expressions.makeFact(literal.getPredicate(), literal.getArguments()); - } catch (IllegalArgumentException e) { - throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); - } - factCount += interpreter.getKnowledgeBase().removeStatement(fact); - } else if (argument.fromRule().isPresent()) { - ruleCount += interpreter.getKnowledgeBase().removeStatement(argument.fromRule().get()); - } else { // implies argument.fromTerm().isPresent() - String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); - Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); - for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { - if (predicate.equals(fact.getPredicate())) { - factCount += interpreter.getKnowledgeBase().removeStatement(fact); - } - } - } - } - - interpreter.printNormal("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s).\n"); - } - - @Override - public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " ()+ .\n" - + " fact or rule: statement(s) to be removed from the knowledge base, or a predicate declaration\n" - + " of the form name[arity] to remove all facts for that predicate.\n" - + "Reasoning needs to be invoked after finishing the removal of statements.\n"); - } - - @Override - public String getSynopsis() { - return "remove facts and rules to the knowledge base"; - } - -} +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class RetractCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + int factCount = 0; + int ruleCount = 0; + for (Argument argument : command.getArguments()) { + if (argument.fromPositiveLiteral().isPresent()) { + PositiveLiteral literal = argument.fromPositiveLiteral().get(); + Fact fact; + try { + fact = Expressions.makeFact(literal.getPredicate(), literal.getArguments()); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); + } + factCount += interpreter.getKnowledgeBase().removeStatement(fact); + } else if (argument.fromRule().isPresent()) { + ruleCount += interpreter.getKnowledgeBase().removeStatement(argument.fromRule().get()); + } else { // implies argument.fromTerm().isPresent() + String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); + for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { + if (predicate.equals(fact.getPredicate())) { + factCount += interpreter.getKnowledgeBase().removeStatement(fact); + } + } + } + } + + interpreter.printNormal("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s).\n"); + } + + @Override + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " ()+ .\n" + + " fact or rule: statement(s) to be removed from the knowledge base, or a predicate declaration\n" + + " of the form name[arity] to remove all facts for that predicate.\n" + + "Reasoning needs to be invoked after finishing the removal of statements.\n"); + } + + @Override + public String getSynopsis() { + return "remove facts and rules to the knowledge base"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java index c713f7789..583621c68 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java @@ -26,22 +26,22 @@ public class SetPrefixCommandInterpreter implements CommandInterpreter { @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { Interpreter.validateArgumentCount(command, 2); - String prefixName = Interpreter.extractStringArgument(command, 0, "prefix name"); - String prefixIri = Interpreter.extractNameArgument(command, 1, "prefix IRI"); + final String prefixName = Interpreter.extractStringArgument(command, 0, "prefix name"); + final String prefixIri = Interpreter.extractNameArgument(command, 1, "prefix IRI"); 
interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().unsetPrefix(prefixName); try { interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri(prefixName, prefixIri); - } catch (PrefixDeclarationException e) { // practically impossible + } catch (final PrefixDeclarationException e) { // practically impossible throw new CommandExecutionException("Setting prefix failed: " + e.getMessage()); } } @Override - public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " : .\n"); + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " : .\n"); } @Override diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java index f9a8189cc..cc693eb7b 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java @@ -1,144 +1,144 @@ -package org.semanticweb.rulewerk.commands; - -/*- - * #%L - * Rulewerk command execution support - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.*; - -import java.io.StringWriter; -import java.util.Arrays; -import java.util.List; - -import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.parser.ParsingException; - -public class RetractCommandInterpreterTest { - - @Test - public void correctUse_succeeds() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - Term a = Expressions.makeAbstractConstant("a"); - Term x = Expressions.makeUniversalVariable("X"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Predicate r = Expressions.makePredicate("r", 1); - Fact fact = Expressions.makeFact(p, a); - Fact fact2 = Expressions.makeFact(q, a); - PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); - PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); - Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(fact2); - interpreter.getKnowledgeBase().addStatement(rule); - - Command command = interpreter.parseCommand("@retract p(a) q(?X) :- r(?X) ."); - interpreter.runCommand(command); - List facts = interpreter.getKnowledgeBase().getFacts(); - List rules = interpreter.getKnowledgeBase().getRules(); - List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); - - assertEquals("retract", command.getName()); - assertEquals(2, command.getArguments().size()); - assertTrue(command.getArguments().get(0).fromPositiveLiteral().isPresent()); - assertTrue(command.getArguments().get(1).fromRule().isPresent()); - - assertEquals(Arrays.asList(fact2), facts); - assertTrue(rules.isEmpty()); - assertTrue(dataSourceDeclarations.isEmpty()); - } - - @Test - public void correctUse_retractPredicate_succeeds() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - Term a = Expressions.makeAbstractConstant("a"); - Term b = Expressions.makeAbstractConstant("b"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Fact pa = Expressions.makeFact(p, a); - Fact pb = Expressions.makeFact(p, b); - Fact qa = Expressions.makeFact(q, a); - - interpreter.getKnowledgeBase().addStatement(pa); - interpreter.getKnowledgeBase().addStatement(pb); - interpreter.getKnowledgeBase().addStatement(qa); - - Command command = interpreter.parseCommand("@retract p[1] ."); - interpreter.runCommand(command); - List facts = interpreter.getKnowledgeBase().getFacts(); - List rules = interpreter.getKnowledgeBase().getRules(); - List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); - - assertEquals(Arrays.asList(qa), facts); - assertTrue(rules.isEmpty()); - assertTrue(dataSourceDeclarations.isEmpty()); - } - - @Test(expected = 
CommandExecutionException.class) - public void wrongArgumentTermNumber_fails() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - - Command command = interpreter.parseCommand("@retract 42 ."); - interpreter.runCommand(command); - } - - @Test(expected = CommandExecutionException.class) - public void wrongArgumentTermStringNoPredicate_fails() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - - Command command = interpreter.parseCommand("@retract \"string\" ."); - interpreter.runCommand(command); - } - - @Test(expected = CommandExecutionException.class) - public void wrongArgumentNonFact_fails() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - - Command command = interpreter.parseCommand("@retract p(?X) ."); - interpreter.runCommand(command); - } - - @Test - public void help_succeeds() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); - InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); - } - - @Test - public void synopsis_succeeds() throws ParsingException, CommandExecutionException { - CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); - InterpreterTest.checkSynopsisFormat(commandInterpreter); - } - -} +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class RetractCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + Fact fact2 = Expressions.makeFact(q, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); + interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(fact2); + interpreter.getKnowledgeBase().addStatement(rule); + + Command command = interpreter.parseCommand("@retract p(a) q(?X) :- r(?X) ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals("retract", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromPositiveLiteral().isPresent()); + assertTrue(command.getArguments().get(1).fromRule().isPresent()); + + assertEquals(Arrays.asList(fact2), facts); + assertTrue(rules.isEmpty()); + assertTrue(dataSourceDeclarations.isEmpty()); + } + + @Test + public void correctUse_retractPredicate_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Term a = Expressions.makeAbstractConstant("a"); + Term b = Expressions.makeAbstractConstant("b"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Fact pa = Expressions.makeFact(p, a); + Fact pb = Expressions.makeFact(p, b); + Fact qa = Expressions.makeFact(q, a); + + interpreter.getKnowledgeBase().addStatement(pa); + interpreter.getKnowledgeBase().addStatement(pb); + interpreter.getKnowledgeBase().addStatement(qa); + + Command command = interpreter.parseCommand("@retract p[1] ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals(Arrays.asList(qa), facts); + assertTrue(rules.isEmpty()); + assertTrue(dataSourceDeclarations.isEmpty()); + } + + @Test(expected = 
CommandExecutionException.class) + public void wrongArgumentTermNumber_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract 42 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentTermStringNoPredicate_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentNonFact_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract p(?X) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} From 4c696f370d1b229804a5513f59499794b7e7fb21 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 Sep 2020 22:06:11 +0200 Subject: [PATCH 0809/1003] correct unit test exit command help message --- .../client/shell/commands/ExitCommandInterpreter.java | 2 +- .../client/shell/commands/ExitCommandInterpreterTest.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index f9214afae..ceb284828 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -61,7 +61,7 @@ public ExitCommandInterpreter(final Shell shell) { @Override public void printHelp(final String commandName, final Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + ".\n"); + interpreter.printNormal("Usage: @" + commandName + " .\n"); } @Override diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java index 33da1aa4c..2a2fd0baf 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -61,10 +61,10 @@ public void help_succeeds() throws ParsingException, CommandExecutionException { final Interpreter interpreterSpy = Mockito.spy(interpreter); 
commandInterpreter.printHelp("commandname", interpreterSpy); - Mockito.verify(interpreterSpy).printNormal("Usage: commandname.\n"); + Mockito.verify(interpreterSpy).printNormal("Usage: @commandname .\n"); final String result = writer.toString(); - assertEquals("Usage: commandname.\n", result); + assertEquals("Usage: @commandname .\n", result); } @Test From 90bc16c5ef7bb4a606c72dd0541dc633f1b2d519 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 15:42:41 +0200 Subject: [PATCH 0810/1003] correct help messages for @query and @export commands --- .../commands/ExportCommandInterpreter.java | 4 +- .../commands/QueryCommandInterpreter.java | 72 +++++++++---------- 2 files changed, 38 insertions(+), 38 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java index 3cffcf25f..30455240f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -52,9 +52,9 @@ public void run(final Command command, final Interpreter interpreter) throws Com @Override public void printHelp(final String commandName, final Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " TASK \"filename\" .\n" // + interpreter.printNormal("Usage: @" + commandName + " TASK \"file\" .\n" // + " TASK: what to export; can be KB or INFERENCES\n" // - + " \"filename\": string path export file (suggested extension: .rls)\n"); + + " \"file\": path to export file (suggested extension: .rls), enclosed in quotes\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index 310505656..35e53a67a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -48,25 +48,25 @@ public class QueryCommandInterpreter implements CommandInterpreter { private String csvFile; @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - processArguments(command.getArguments()); + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + this.processArguments(command.getArguments()); - if (doCount) { - printCountQueryResults(interpreter); - } else if (csvFile == null) { - printQueryResults(interpreter); + if (this.doCount) { + this.printCountQueryResults(interpreter); + } else if (this.csvFile == null) { + this.printQueryResults(interpreter); } else { - exportQueryResults(interpreter); + this.exportQueryResults(interpreter); } } @Override - public void printHelp(String commandName, Interpreter interpreter) { + public void printHelp(final String commandName, final Interpreter interpreter) { interpreter.printNormal( - "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV ] .\n" + "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV <\"file\">] .\n" + " query literal: positive literal, possibly with ?queryVariables\n" + " limit: maximal number of results to be shown\n" - + " filename: string path to CSV file for exporting query results\n"); + + " \"file\": path to CSV file for exporting query 
results, enclosed in quotes\n"); } @Override @@ -74,19 +74,19 @@ public String getSynopsis() { return "print or export query results"; } - private void processArguments(List arguments) throws CommandExecutionException { + private void processArguments(final List arguments) throws CommandExecutionException { int pos = 0; - limit = -1; - doCount = false; - csvFile = null; + this.limit = -1; + this.doCount = false; + this.csvFile = null; if (arguments.size() > 0 && KEYWORD_COUNT.equals(arguments.get(0).fromTerm().orElse(null))) { - doCount = true; + this.doCount = true; pos++; } if (arguments.size() > pos && arguments.get(pos).fromPositiveLiteral().isPresent()) { - queryLiteral = arguments.get(pos).fromPositiveLiteral().get(); + this.queryLiteral = arguments.get(pos).fromPositiveLiteral().get(); pos++; } else { throw new CommandExecutionException("A query literal must be given."); @@ -96,18 +96,18 @@ private void processArguments(List arguments) throws CommandExecutionE if (arguments.size() > pos + 1 && KEYWORD_LIMIT.equals(arguments.get(pos).fromTerm().orElse(null)) && arguments.get(pos + 1).fromTerm().isPresent()) { try { - limit = Terms.extractInt(arguments.get(pos + 1).fromTerm().get()); + this.limit = Terms.extractInt(arguments.get(pos + 1).fromTerm().get()); pos += 2; - } catch (IllegalArgumentException e) { + } catch (final IllegalArgumentException e) { throw new CommandExecutionException( "Invalid limit given: " + arguments.get(pos + 1).fromTerm().get()); } } else if (arguments.size() > pos + 1 && KEYWORD_TOFILE.equals(arguments.get(pos).fromTerm().orElse(null)) && arguments.get(pos + 1).fromTerm().isPresent()) { try { - csvFile = Terms.extractString(arguments.get(pos + 1).fromTerm().get()); + this.csvFile = Terms.extractString(arguments.get(pos + 1).fromTerm().get()); pos += 2; - } catch (IllegalArgumentException e) { + } catch (final IllegalArgumentException e) { throw new CommandExecutionException( "Invalid filename given: " + arguments.get(pos + 1).fromTerm().get()); } @@ -117,17 +117,17 @@ private void processArguments(List arguments) throws CommandExecutionE } } - private void printCountQueryResults(Interpreter interpreter) throws CommandExecutionException { - if (limit != -1) { + private void printCountQueryResults(final Interpreter interpreter) throws CommandExecutionException { + if (this.limit != -1) { throw new CommandExecutionException("LIMIT not supported with COUNT"); } - if (csvFile != null) { + if (this.csvFile != null) { throw new CommandExecutionException("COUNT results cannot be exported to CSV"); } - Timer timer = new Timer("query"); + final Timer timer = new Timer("query"); timer.start(); - QueryAnswerCount count = interpreter.getReasoner().countQueryAnswers(queryLiteral); + final QueryAnswerCount count = interpreter.getReasoner().countQueryAnswers(this.queryLiteral); timer.stop(); interpreter.printNormal(String.valueOf(count.getCount()) + "\n"); @@ -135,14 +135,14 @@ private void printCountQueryResults(Interpreter interpreter) throws CommandExecu interpreter.printNormal(" This result is " + count.getCorrectness() + ".\n"); } - private void printQueryResults(Interpreter interpreter) throws CommandExecutionException { - LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(queryLiteral, interpreter.getWriter(), + private void printQueryResults(final Interpreter interpreter) throws CommandExecutionException { + final LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(this.queryLiteral, interpreter.getWriter(), 
interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); - Timer timer = new Timer("query"); + final Timer timer = new Timer("query"); timer.start(); - try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(queryLiteral, true)) { - while (printer.getResultCount() != limit && answers.hasNext()) { + try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(this.queryLiteral, true)) { + while (printer.getResultCount() != this.limit && answers.hasNext()) { printer.write(answers.next()); } timer.stop(); @@ -155,22 +155,22 @@ private void printQueryResults(Interpreter interpreter) throws CommandExecutionE printer.getResultCount() + " result(s) in " + timer.getTotalCpuTime() / 1000000 + "ms."); } interpreter.printNormal(" Results are " + answers.getCorrectness() + ".\n"); - } catch (IOException e) { + } catch (final IOException e) { throw new CommandExecutionException(e.getMessage(), e); } } - private void exportQueryResults(Interpreter interpreter) throws CommandExecutionException { - if (limit != -1) { + private void exportQueryResults(final Interpreter interpreter) throws CommandExecutionException { + if (this.limit != -1) { throw new CommandExecutionException("LIMIT not supported for CSV export"); } - Timer timer = new Timer("query"); + final Timer timer = new Timer("query"); timer.start(); Correctness correctness; try { - correctness = interpreter.getReasoner().exportQueryAnswersToCsv(queryLiteral, csvFile, true); - } catch (IOException e) { + correctness = interpreter.getReasoner().exportQueryAnswersToCsv(this.queryLiteral, this.csvFile, true); + } catch (final IOException e) { throw new CommandExecutionException(e.getMessage(), e); } timer.stop(); From 123531c2d1c4a4abc9f1d259a91a6387ff5f8870 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 16:21:24 +0200 Subject: [PATCH 0811/1003] Update README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 5d183be1f..f8619445b 100644 --- a/README.md +++ b/README.md @@ -56,3 +56,4 @@ Development Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. +* To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From 622e39dc95b09d14bd96fbffe9db11bcc7dcdf0f Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 16:23:03 +0200 Subject: [PATCH 0812/1003] remove unused dependency from rulewerk-client pom --- rulewerk-client/pom.xml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index f05da9f94..d8bcb3d67 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -68,11 +68,6 @@ jansi ${jansi.version}
    - From e19ac6a57742ed8ec7712db9dfeea6aef5123d8c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 16:25:50 +0200 Subject: [PATCH 0813/1003] update to new vlog-java release version 1.3.4 --- rulewerk-vlog/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 5e767200f..fa7921ee2 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -17,7 +17,7 @@ Bindings for the VLog reasoner backend. - 1.3.3 + 1.3.4 vlog-java From 440d3cdefb135fb65b8839273505eb0eced73eaa Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 16:34:47 +0200 Subject: [PATCH 0814/1003] update version to release version 0.7.7 --- README.md | 4 ++-- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-commands/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 12 files changed, 13 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index f8619445b..221f3a1b0 100644 --- a/README.md +++ b/README.md @@ -9,13 +9,13 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of Rulewerk is version 0.6.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of Rulewerk is version 0.7.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` org.semanticweb.rulewerk rulewerk-core - 0.6.0 + 0.7.0 ``` diff --git a/coverage/pom.xml b/coverage/pom.xml index 76ca8882d..40e671d48 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 coverage diff --git a/pom.xml b/pom.xml index bd2ca0a0d..300fb51a4 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index d8bcb3d67..f7b3e3a7c 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-client diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml index 97509c6c6..599a526a5 100644 --- a/rulewerk-commands/pom.xml +++ b/rulewerk-commands/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-commands diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 34701907a..dc562d9ab 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 975a3b56d..739f580cb 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index e5621cfbb..34ef21c2d 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-graal diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index 
d351dd2c3..2b50c042d 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index 75487af58..c88769034 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-parser diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index fe97c337b..ffed1fc13 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index fa7921ee2..53fc07b13 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-vlog From a5573c864a4f70868ea666b96911bdf1a119d54c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 16:49:28 +0200 Subject: [PATCH 0815/1003] use forward slash in path auto-completion --- .../rulewerk/client/shell/DefaultShellConfiguration.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java index f893512f7..5a82c72e6 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -68,6 +68,8 @@ LineReaderBuilder getDefaultLineReaderConfiguration(final LineReaderBuilder line */ lineReaderBuilder.option(LineReader.Option.INSERT_TAB, false); lineReaderBuilder.option(LineReader.Option.AUTO_FRESH_LINE, true); + lineReaderBuilder.option(LineReader.Option.USE_FORWARD_SLASH, true); + return lineReaderBuilder; } From b9db5368d02648b53fb1b6c22a480c9db4ae71e8 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 17:51:33 +0200 Subject: [PATCH 0816/1003] comment out building unreleased version of VLog --- .travis.yml | 80 ++++++++++++++++++++++++++--------------------------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/.travis.yml b/.travis.yml index cb87765d8..3259c75de 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,40 +1,40 @@ -language: java -os: linux -jobs: - include: - - os: linux - dist: bionic - jdk: openjdk11 - after_success: - - mvn clean test jacoco:report coveralls:report - - - os: linux - dist: xenial - addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - gcc-6 - - g++-6 - - libstdc++6 - env: CC=gcc-6 CXX=g++-6 - jdk: openjdk8 - - - os: osx - osx_image: xcode10.2 - allow_failures: - - dist: trusty - -## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -before_install: -# # explicitly avoid bash as travis screws with .bashrc, -# # cf. 
https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 - - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" - -install: mvn install $OPTIONS -DskipTests=true - -cache: - directories: - - ./local_builds - - $HOME/.m2 +language: java +os: linux +jobs: + include: + - os: linux + dist: bionic + jdk: openjdk11 + after_success: + - mvn clean test jacoco:report coveralls:report + + - os: linux + dist: xenial + addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - gcc-6 + - g++-6 + - libstdc++6 + env: CC=gcc-6 CXX=g++-6 + jdk: openjdk8 + + - os: osx + osx_image: xcode10.2 + allow_failures: + - dist: trusty + +## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar +before_install: +# # explicitly avoid bash as travis screws with .bashrc, +# # cf. https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 +# - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" + +install: mvn install $OPTIONS -DskipTests=true + +cache: + directories: + - ./local_builds + - $HOME/.m2 From fc16aaeb3abf7e988dd9088a5789b7f265f0ea6c Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 3 Sep 2020 18:20:29 +0200 Subject: [PATCH 0817/1003] Parser: treat imports and data sources relatively to current file --- .../commands/LoadCommandInterpreter.java | 13 ++-- .../implementation/TridentDataSource.java | 11 +++- .../rulewerk/parser/DirectiveHandler.java | 14 +++-- .../rulewerk/parser/ParserConfiguration.java | 53 ++++++++++++++-- .../CsvFileDataSourceDeclarationHandler.java | 9 +-- .../DataSourceDeclarationHandler.java | 63 ++++++++++++++++--- .../RdfFileDataSourceDeclarationHandler.java | 9 +-- ...eryResultDataSourceDeclarationHandler.java | 6 +- .../TridentDataSourceDeclarationHandler.java | 9 +-- .../ImportFileDirectiveHandler.java | 17 ++--- .../ImportFileRelativeDirectiveHandler.java | 7 ++- .../parser/javacc/SubParserFactory.java | 4 +- .../rulewerk/parser/DirectiveHandlerTest.java | 9 +-- .../parser/RuleParserDataSourceTest.java | 31 +++++---- .../rulewerk/parser/RuleParserTest.java | 45 ++++++++++++- .../src/test/resources/subdir/facts.rls | 4 ++ .../src/test/resources/subdir/parent.rls | 1 + .../src/test/resources/subdir/sibling.rls | 1 + 18 files changed, 236 insertions(+), 70 deletions(-) create mode 100644 rulewerk-parser/src/test/resources/subdir/facts.rls create mode 100644 rulewerk-parser/src/test/resources/subdir/parent.rls create mode 100644 rulewerk-parser/src/test/resources/subdir/sibling.rls diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index c0a223524..c8a2950c6 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
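The hunk below changes the `@load` command interpreter to use a parser configuration whose import base path is the directory of the loaded file, so that relative `@import` and `@source` paths resolve against that file instead of the working directory. A minimal sketch of the same pattern outside the client, assuming the invented path `kb/main.rls` and a plain `new KnowledgeBase()`:

```
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.parser.DefaultParserConfiguration;
import org.semanticweb.rulewerk.parser.ParserConfiguration;
import org.semanticweb.rulewerk.parser.RuleParser;

public class LoadWithRelativeImports {
	public static void main(final String[] args) throws Exception {
		final File file = new File("kb/main.rls"); // invented example path
		// Relative @import and @source paths inside main.rls now resolve against kb/.
		final ParserConfiguration parserConfiguration = new DefaultParserConfiguration()
				.setImportBasePath(file.getParent());
		final KnowledgeBase knowledgeBase = new KnowledgeBase();
		try (InputStream inputStream = new FileInputStream(file)) {
			RuleParser.parseInto(knowledgeBase, inputStream, parserConfiguration);
		}
		System.out.println(knowledgeBase.getFacts());
	}
}
```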
@@ -45,13 +45,15 @@ import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; import org.semanticweb.rulewerk.rdf.RdfModelConverter; /** * Interpreter for the load command. - * + * * @author Markus Kroetzsch * */ @@ -120,7 +122,10 @@ public void run(final Command command, final Interpreter interpreter) throws Com private void loadKb(final Interpreter interpreter, final String fileName) throws CommandExecutionException { try { final InputStream inputStream = interpreter.getFileInputStream(fileName); - RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream); + final File file = new File(fileName); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration() + .setImportBasePath(file.getParent()); + RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream, parserConfiguration); } catch (final FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); } catch (final ParsingException e) { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java index 54cfba4a8..213c7df2f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -34,7 +34,7 @@ * storage utility. This is the recommended data source for large RDF * datasets in the VLog reasoner. Trident databases are generated from RDF input * files in a batch process using the Trident tool. - * + * * @author Markus Kroetzsch * */ @@ -63,6 +63,11 @@ public Fact getDeclarationFact() { Expressions.makeDatatypeConstant(filePath, PrefixDeclarationRegistry.XSD_STRING)); } + @Override + public String toString() { + return "[TridentDataSource [tridentFile=" + this.filePath + "]"; + } + @Override public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 2a8de3aa9..15b5914e9 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -97,16 +97,22 @@ public static String validateStringArgument(final Argument argument, final Strin * * @param argument the argument to validate * @param description a description of the argument, used in constructing the - * error message. 
+ * error message + * @param importBasePath the path that relative file names are resolved against * - * @throws ParsingException when the given argument is not a valid file path. + * @throws ParsingException when the given argument is not a valid file path * - * @return the File corresponding to the contained file path. + * @return the File corresponding to the contained file path */ - public static File validateFilenameArgument(final Argument argument, final String description) + public static File validateFilenameArgument(final Argument argument, final String description, final String importBasePath) throws ParsingException { String fileName = DirectiveHandler.validateStringArgument(argument, description); File file = new File(fileName); + + if (!file.isAbsolute() || importBasePath.isEmpty()) { + file = new File(importBasePath + File.separator + fileName); + } + try { // we don't care about the actual path, just that there is one. file.toPath(); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 0bf66066d..15d05f3f3 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -57,12 +57,12 @@ public class ParserConfiguration { /** * The registered data sources. */ - private final HashMap dataSources = new HashMap<>(); + private HashMap dataSources = new HashMap<>(); /** * The registered datatypes. */ - private final HashMap datatypes = new HashMap<>(); + private HashMap datatypes = new HashMap<>(); /** * The registered configurable literals. @@ -74,6 +74,28 @@ public class ParserConfiguration { */ private HashMap> directives = new HashMap<>(); + /** + * The current base path to resolve imports against. Defaults to the current + * working directory. + */ + private String importBasePath = System.getProperty("user.dir"); + + public ParserConfiguration() { + } + + /** + * Copy constructor. + * + * @param other {@link ParserConfiguration} to copy + */ + public ParserConfiguration(ParserConfiguration other) { + this.allowNamedNulls = other.allowNamedNulls; + this.dataSources = new HashMap<>(other.dataSources); + this.literals = new HashMap<>(other.literals); + this.directives = new HashMap<>(other.directives); + this.importBasePath = new String(other.importBasePath); + } + /** * Register a new (type of) Data Source. 
* @@ -121,7 +143,7 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(PositiveLit throw new ParsingException("Data source \"" + declaration.getPredicate().getName() + "\" is not known."); } - return handler.handleDataSourceDeclaration(declaration.getArguments()); + return handler.handleDataSourceDeclaration(declaration.getArguments(), this.importBasePath); } /** @@ -308,4 +330,25 @@ public ParserConfiguration disallowNamedNulls() { public boolean isParsingOfNamedNullsAllowed() { return this.allowNamedNulls; } + + /** + * Get the base path for file imports. + * + * @return the path that relative imports will be resolved against. + */ + public String getImportBasePath() { + return this.importBasePath; + } + + /** + * Set a new base path for file imports. + * + * @param importBasePath path that relative imports will be resolved against. + */ + public ParserConfiguration setImportBasePath(String importBasePath) { + this.importBasePath = importBasePath; + + return this; + } + } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java index 185f073ca..ceac5b156 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -35,9 +35,10 @@ */ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + public DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); - String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "CSV file name"); + String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0), "CSV file name", + importBasePath); try { return new CsvFileDataSource(fileName); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java index 88801c331..1aa6a824c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java @@ -1,5 +1,6 @@ package org.semanticweb.rulewerk.parser.datasources; +import java.io.File; import java.net.URL; import java.util.List; @@ -12,9 +13,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -36,7 +37,19 @@ @FunctionalInterface public interface DataSourceDeclarationHandler { - DataSource handleDataSourceDeclaration(List terms) throws ParsingException; + /** + * Handle a data source declaration. + * + * @param terms the list of arguments given in the declaration + * @param importBasePath the base path that relative imports will be resolved + * against + * + * @throws ParsingException when the arguments are unsuitable for the data + * source. + * + * @return a DataSource instance. + */ + DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException; /** * Validate the provided number of arguments to the source declaration. @@ -56,7 +69,7 @@ public static void validateNumberOfArguments(final List terms, final int n /** * Returns the string content of the given term, or reports an error if the term * is not an xsd:string. - * + * * @param term the term to be processed * @param parameterName the string name of the parameter to be used in error * messages @@ -67,14 +80,14 @@ public static String validateStringArgument(Term term, String parameterName) thr try { return Terms.extractString(term); } catch (IllegalArgumentException e) { - throw makeParameterParsingException(term, parameterName, e); + throw makeParameterParsingException(term, parameterName, "String", e); } } /** * Returns the URL represented by the given term, or reports an error if no * valid URL could be extracted from the term. - * + * * @param term the term to be processed * @param parameterName the string name of the parameter to be used in error * messages @@ -85,12 +98,42 @@ public static URL validateUrlArgument(Term term, String parameterName) throws Pa try { return Terms.extractUrl(term); } catch (IllegalArgumentException e) { - throw makeParameterParsingException(term, parameterName, e); + throw makeParameterParsingException(term, parameterName, "URL", e); + } + } + + /** + * Returns the File name represented by the given term, or reports an error if + * no valid File name could be extracted from the term. + * + * @param term the term to be processed + * @param parameterName the string name of the parameter to be used in error + * messages + * @param importBasePath the base path that relative paths will be resolved + * against + * + * @throws ParsingException when the term was not a valid file path + * @return the extracted file path + */ + public static String validateFileNameArgument(Term term, String parameterName, String importBasePath) + throws ParsingException { + File file; + + try { + file = new File(Terms.extractString(term)); + } catch (IllegalArgumentException e) { + throw makeParameterParsingException(term, parameterName, "File name", e); + } + + if (file.isAbsolute() || importBasePath.isEmpty()) { + return file.getPath(); } + return importBasePath + File.separator + file.getPath(); } - static ParsingException makeParameterParsingException(Term term, String parameterName, Throwable cause) { - return new ParsingException("Expected " + parameterName + " to be a string. 
Found " + term.toString() + ".", - cause); + static ParsingException makeParameterParsingException(Term term, String parameterName, String type, + Throwable cause) { + return new ParsingException( + "Expected " + parameterName + " to be a " + type + ". Found " + term.toString() + ".", cause); } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java index 36a3738c4..99b85132c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -35,9 +35,10 @@ */ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + public DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); - String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "RDF file name"); + String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0), "RDF file name", + importBasePath); try { return new RdfFileDataSource(fileName); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 3a405d83c..5faca3ad9 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -35,7 +35,7 @@ */ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler { - public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + public DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(terms, 3); URL endpoint = DataSourceDeclarationHandler.validateUrlArgument(terms.get(0), "SPARQL endpoint URL"); String variables = DataSourceDeclarationHandler.validateStringArgument(terms.get(1), diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java index 830d97c59..6640a414e 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -34,9 +34,10 @@ */ public class TridentDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + public DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); - String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "path to Trident database"); + String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0), + "path to Trident database", importBasePath); return new TridentDataSource(fileName); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index 403238df4..262815779 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -42,15 +42,18 @@ public class ImportFileDirectiveHandler implements DirectiveHandler { @Override - public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) + public KnowledgeBase handleDirective(final List arguments, final SubParserFactory subParserFactory) throws ParsingException { + final ParserConfiguration parserConfiguration = new ParserConfiguration( + getParserConfiguration(subParserFactory)); DirectiveHandler.validateNumberOfArguments(arguments, 1); - File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); - KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); - ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); + final File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file", + parserConfiguration.getImportBasePath()); + final KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); + parserConfiguration.setImportBasePath(file.getParent()); try { - knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { + knowledgeBase.importRulesFile(file, (final InputStream stream, final KnowledgeBase kb) -> { RuleParser.parseInto(kb, stream, parserConfiguration); }); } catch (RulewerkException | IOException | IllegalArgumentException e) { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index f70831417..115f4f4e3 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -44,11 +44,14 @@ public class ImportFileRelativeDirectiveHandler implements DirectiveHandler arguments, SubParserFactory subParserFactory) throws ParsingException { + final ParserConfiguration parserConfiguration = new ParserConfiguration( + getParserConfiguration(subParserFactory)); DirectiveHandler.validateNumberOfArguments(arguments, 1); PrefixDeclarationRegistry prefixDeclarationRegistry = getPrefixDeclarationRegistry(subParserFactory); - File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); + File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file", + parserConfiguration.getImportBasePath()); KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); - ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); + parserConfiguration.setImportBasePath(file.getParent()); try { knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java index 9a615bf1b..a607ac22e 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java index 66f89562a..eaedc2198 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -30,6 +30,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class DirectiveHandlerTest { + private static final String BASE_PATH = System.getProperty("user.dir"); private static final String STRING = "src/test/resources/facts.rls"; private static final Term STRINGTERM = Expressions.makeDatatypeConstant(STRING, PrefixDeclarationRegistry.XSD_STRING); @@ -42,7 +43,7 @@ public class DirectiveHandlerTest { public void validateStringArgument_stringArgument_succeeds() throws ParsingException { assertEquals(STRING, DirectiveHandler.validateStringArgument(TERM_STRING_ARGUMENT, "string argument")); } - + @Test(expected = ParsingException.class) public void validateStringArgument_stringArgument_throws() throws ParsingException { assertEquals(STRING, DirectiveHandler.validateStringArgument(TERM_INT_ARGUMENT, "string argument")); @@ -55,15 +56,15 @@ public void validateTermArgument_termArgument_succeeds() throws ParsingException @Test public void validateFilenameArgument_filename_succeeds() throws ParsingException { - assertEquals(new File(STRING), - DirectiveHandler.validateFilenameArgument(TERM_STRING_ARGUMENT, "filename argument")); + assertEquals(new File(BASE_PATH + File.separator + STRING), + DirectiveHandler.validateFilenameArgument(TERM_STRING_ARGUMENT, "filename argument", BASE_PATH)); } @Test public void validateFilenameArgument_invalidFilename_throws() throws ParsingException { DirectiveHandler.validateFilenameArgument(Argument .term(Expressions.makeDatatypeConstant(STRING + "-nonexistent", PrefixDeclarationRegistry.XSD_STRING)), - "filename argument"); + "filename argument", BASE_PATH); } } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index 8e0c0abb4..6a22ed61b 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
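The updated tests below pin down how file arguments are resolved. As a compact illustration, the same `@source` declaration yields different file paths depending on the configured import base path; the directory `/data/kb` and the database name `db` are invented for this sketch:

```
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.parser.DefaultParserConfiguration;
import org.semanticweb.rulewerk.parser.ParserConfiguration;
import org.semanticweb.rulewerk.parser.RuleParser;

public class ImportBasePathExample {
	public static void main(final String[] args) throws Exception {
		final String declaration = "@source p[2] : trident(\"db\") .";

		// With the default configuration, "db" resolves against the working directory.
		final KnowledgeBase defaultKb = RuleParser.parse(declaration);

		// With an explicit base path, "db" resolves below /data/kb instead.
		final ParserConfiguration conf = new DefaultParserConfiguration().setImportBasePath("/data/kb");
		final KnowledgeBase relocatedKb = new KnowledgeBase();
		RuleParser.parseInto(relocatedKb, declaration, conf);

		System.out.println(defaultKb.getDataSourceDeclarations());
		System.out.println(relocatedKb.getDataSourceDeclarations());
	}
}
```

On a POSIX-style system the first declaration points at a working-directory-relative `db`, the second at `/data/kb/db`.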
@@ -23,6 +23,7 @@ import static org.junit.Assert.*; import static org.mockito.Mockito.*; +import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; @@ -49,6 +50,7 @@ import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; public class RuleParserDataSourceTest { + private static final String BASE_PATH = System.getProperty("user.dir") + File.separator; private static final String EXAMPLE_RDF_FILE_PATH = "src/main/data/input/example.nt.gz"; private static final String EXAMPLE_CSV_FILE_PATH = "src/main/data/input/example.csv"; private static final String WIKIDATA_SPARQL_ENDPOINT_URI = "https://query.wikidata.org/sparql"; @@ -57,14 +59,14 @@ public class RuleParserDataSourceTest { @Test public void testCsvSource() throws ParsingException, IOException { String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; - CsvFileDataSource csvds = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); + CsvFileDataSource csvds = new CsvFileDataSource(BASE_PATH + EXAMPLE_CSV_FILE_PATH); assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @Test public void testRdfSource() throws ParsingException, IOException { String input = "@source p[3] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; - RdfFileDataSource rdfds = new RdfFileDataSource(EXAMPLE_RDF_FILE_PATH); + RdfFileDataSource rdfds = new RdfFileDataSource(BASE_PATH + EXAMPLE_RDF_FILE_PATH); assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @@ -142,15 +144,18 @@ public void testCustomDataSource() throws ParsingException { DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDataSourceDeclaration(ArgumentMatchers.>any()); + doReturn(source).when(handler).handleDataSourceDeclaration(ArgumentMatchers.>any(), + ArgumentMatchers.any()); String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; List expectedArguments = Arrays.asList( Expressions.makeDatatypeConstant("hello", PrefixDeclarationRegistry.XSD_STRING), Expressions.makeDatatypeConstant("world", PrefixDeclarationRegistry.XSD_STRING)); RuleParser.parse(input, parserConfiguration); + final String expectedImportBasePath = System.getProperty("user.dir"); - verify(handler).handleDataSourceDeclaration(ArgumentMatchers.eq(expectedArguments)); + verify(handler).handleDataSourceDeclaration(ArgumentMatchers.eq(expectedArguments), + ArgumentMatchers.eq(expectedImportBasePath)); } @Test @@ -159,9 +164,9 @@ public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingExcep Predicate predicate1 = Expressions.makePredicate("p", 1); SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL(WIKIDATA_SPARQL_ENDPOINT_URI), "var", "?var wdt:P31 wd:Q5 ."); - DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); - RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); - assertEquals(dataSourceDeclaration1, kb.getDataSourceDeclarations().get(0)); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, dataSource); + RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); } @Test @@ -171,7 +176,8 @@ public void rdfDataSourceDeclarationToStringParsingTest() 
throws ParsingExceptio RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(EXAMPLE_RDF_FILE_PATH); DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedRdfFileDataSource); - RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration().setImportBasePath(""); + RuleParser.parseInto(kb, dataSourceDeclaration.toString(), parserConfiguration); assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); } @@ -182,7 +188,8 @@ public void csvDataSourceDeclarationToStringParsingTest() throws ParsingExceptio CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedCsvFileDataSource); - RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration().setImportBasePath(""); + RuleParser.parseInto(kb, dataSourceDeclaration.toString(), parserConfiguration); assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); } @@ -200,7 +207,7 @@ public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws Pa public void testTridentSource_succeeds() throws ParsingException, IOException { String input = "@source p[2] : trident(\"" + EXAMPLE_TRIDENT_PATH + "\") ."; DataSource parsed = RuleParser.parseDataSourceDeclaration(input).getDataSource(); - TridentDataSource expected = new TridentDataSource(EXAMPLE_TRIDENT_PATH); + TridentDataSource expected = new TridentDataSource(BASE_PATH + EXAMPLE_TRIDENT_PATH); assertEquals(expected, parsed); } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index 3b3b77b60..797d6c59c 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -51,7 +51,9 @@ public class RuleParserTest implements ParserTestUtils { private final Variable z = Expressions.makeUniversalVariable("Z"); private final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); private final Constant d = Expressions.makeAbstractConstant("http://example.org/d"); + private final Constant e = Expressions.makeAbstractConstant("https://example.org/e"); private final Constant abc = Expressions.makeDatatypeConstant("abc", PrefixDeclarationRegistry.XSD_STRING); + private final Constant xyz = Expressions.makeDatatypeConstant("xyz", PrefixDeclarationRegistry.XSD_STRING); private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", x, c); private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); @@ -60,6 +62,9 @@ public class RuleParserTest implements ParserTestUtils { private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", c); private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); private final PositiveLiteral fact3 = Expressions.makePositiveLiteral("http://example.org/p", abc); + private final PositiveLiteral fact4 = Expressions.makePositiveLiteral("https://example.org/s", e); + private final PositiveLiteral fact5 = Expressions.makePositiveLiteral("q", xyz); + private final PositiveLiteral fact6 = Expressions.makePositiveLiteral("http://example.org/p", abc); private final Conjunction body1 = Expressions.makeConjunction(atom1, atom2); private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); private final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); @@ -504,6 +509,42 @@ public void parse_relativeImportStatement_succeeds() throws ParsingException { assertEquals(expected, result); } + @Test + public void parse_importStatement_relativeImport_succeeds() throws ParsingException { + String input = "@import \"src/test/resources/subdir/sibling.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact4, fact5); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_importStatement_relativeParentImport_succeeds() throws ParsingException { + String input = "@import \"src/test/resources/subdir/parent.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact1, fact2); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_relativeImportStatement_relativeImport_succeeds() throws ParsingException { + String input = "@base . @import-relative \"src/test/resources/subdir/sibling.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact4, fact5); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_relativeImportStatement_relativeParentImport_succeeds() throws ParsingException { + String input = "@base . @import-relative \"src/test/resources/subdir/parent.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact1, fact2); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + @Test public void parse_import_renamesNamedNulls() throws ParsingException { String input = "p(_:blank) . 
@import \"src/test/resources/blank.rls\" ."; diff --git a/rulewerk-parser/src/test/resources/subdir/facts.rls b/rulewerk-parser/src/test/resources/subdir/facts.rls new file mode 100644 index 000000000..b7b5da5ef --- /dev/null +++ b/rulewerk-parser/src/test/resources/subdir/facts.rls @@ -0,0 +1,4 @@ +@prefix ex: . + +ex:s(ex:e) . +q("xyz") . diff --git a/rulewerk-parser/src/test/resources/subdir/parent.rls b/rulewerk-parser/src/test/resources/subdir/parent.rls new file mode 100644 index 000000000..4abb16ff5 --- /dev/null +++ b/rulewerk-parser/src/test/resources/subdir/parent.rls @@ -0,0 +1 @@ +@import "../facts.rls" . diff --git a/rulewerk-parser/src/test/resources/subdir/sibling.rls b/rulewerk-parser/src/test/resources/subdir/sibling.rls new file mode 100644 index 000000000..0d1d18bfb --- /dev/null +++ b/rulewerk-parser/src/test/resources/subdir/sibling.rls @@ -0,0 +1 @@ +@import "facts.rls" . From ab4e96b6dc53052d956019e12979295ec6563c24 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 3 Sep 2020 18:25:00 +0200 Subject: [PATCH 0818/1003] Add relative paths handling to release notes --- RELEASE-NOTES.md | 299 ++++++++++++++++++++++++----------------------- 1 file changed, 150 insertions(+), 149 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index d7fbee0a6..2b6ca3371 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,149 +1,150 @@ -Rulewerk Release Notes -====================== - -Rulewerk v0.7.0 ---------------- - -New features: -* New interactive Rulewerk shell for rule reasoning from the command line client -* Significant speedup in iterating over query results -* Support for using data from a Trident database, the recommended data source for large - RDF graphs in VLog -* More features to control how Rulewerk imports RDF data using rulewerk-rdf module -* New class `LiteralQueryResultPrinter` for pretty-printing query results - -Other improvements: -* Improved serialization of knowledge bases (using namespaces) -* Simple (non-IRI, namespace-less) predicate names can now include - and _ -* Nulls in input data (aka "blank nodes") are now properly skolemized for VLog -* InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where - two or more edges are the same. - -Breaking changes: -* The `RdfModelConverter` class from the rdf package is no longer static (and has more options) -* The `Serializer` class in the core package has been replaced by a new implementation - with a completely different interface. -* The methods `getSerialization` that were present in most syntax objects have been removed. Use `toString()` instead for simple serializations, or invoke a custom Serializer. -* The `DataSource` interface requires a new method to be implemented. - -Rulewerk v0.6.0 ---------------- - -Breaking changes: -* VLog4j is now called Rulewerk. Consequently, the groupId, artifact Ids, and package names - of the project have changed. -* In the examples package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no - longer exist. It can be replaced by - `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` -* The `FileDataSource` constructor and those of child classes (`CsvFileDataSource`, `RdfFileDataSource`) - now take the String path to a file instead of `File` object. -* The VLog backend has been moved to a new `rulewerk-vlog` module, - changing several import paths. `Reasoner.getInstance()` is - gone. 
Furthermore, `InMemoryDataSource` has become an abstract class, - use `VLogInMemoryDataSource` where applicable. - -New features: -* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` -* All inferred facts can be serialized to a file using `Reasoner.writeInferences()` -* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()` -* `Reasoner.getCorrectness()` returns the correctness result of the last reasoning task. -* Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` -* Rules files may import other rules files using `@import` and - `@import-relative`, where the latter resolves relative IRIs using - the current base IRI, unless the imported file explicitly specifies - a different one. -* Named nulls of the form `_:name` are now allowed during parsing (but - may not occur in rule bodies). They are renamed to assure that they - are distinct on a per-file level. -* The parser allows custom directives to be implemented, and a certain - set of delimiters allows for custom literal expressions. - -Other improvements: -* Prefix declarations are now kept as part of the Knowledge Base and - are used to abbreviate names when exporting inferences. - -Bugfixes: -* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now - -VLog4j v0.5.0 -------------- - -Breaking changes: -* The data model for rules has been refined and changed: - * Instead of Constant, specific types of constants are used to capture abtract and data values - * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification - * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes - * Methods to access terms now use Java Streams and are unified across syntactic objects -* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()` - -New features: -* New module vlog4j-client provides a stand-alone command line client jar for VLog4j -* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki -* The parser behaviour for data source declarations and certain datatype literals can be customised. - -Other improvements: -* Data model is better aligned with syntax supported by parser -* Java object Statements (rules, facts, datasource declarations) String representation is parseable -* OWL API dependency has been upgraded from 4.5.1 to latest (5.1.11) -* SL4J dependency has been upgraded from 1.7.10 to latest (1.7.28) -* Cobertura test coverage tool has been replaced by JaCoCo - -Bugfixes: -* Acyclicity checks work again without calling reason() first (issue #128) -* in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) -* in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) -* When parsing syntactic fragment such as Facts or Literals, the parser now enforces that all input is consumed. 
- -VLog4j v0.4.0 -------------- - -Breaking changes: -* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) -* The EdbIdbSeparation is obsolete and does no longer exist -* IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier -* A new interface Fact has replaced the overly general PositiveLiteral in many places - -New features: -* New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java -* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction) -* New InMemoryDataSource for efficient in-memory fact loading -* New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner -* Modifications to the knowledge base are taken into account by the reasoner -* New and updated example programs to illustrate use of syntax - -Other improvements: -* Query results now indicate their guaranteed correctness (example: answers can be incomplete when setting a timeout) -* Faster and more memory-efficient loading of facts -* Better error reporting; improved use of exceptions -* Better logging, especially on the INFO level -* Better code structure and testing - -Bugfixes: -* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now - - -VLog4j v0.3.0 -------------- - -New features: -* Support for Graal data structures (conversion from Graal model to VLog model objects) -* Stratified negation: rule bodies are conjunctions of positive or negated literals -* SPARQL-based data sources: load remote data from SPARQL endpoints -* Acyclicity and cyclicity checks: JA, RJA, MFA, RMFA, RFC, as well as a generic method that checks whether given set or rules and fact predicates are acyclic, cyclic, or undetermined - -VLog4j v0.2.0 -------------- - -New features: -* supporting File data sources of N-Triples format (.nt file extension) -* supporting g-zipped data source files (.csv.gz, .nt.gz) - -VLog4j v0.1.0 -------------- - -Initial release. - -New features: -* Essential data models for rules and facts, and essential reasoner functionality -* support for reading from RDF files -* support for converting rules from OWL ontology, loaded with the OWL API +Rulewerk Release Notes +====================== + +Rulewerk v0.7.0 +--------------- + +New features: +* New interactive Rulewerk shell for rule reasoning from the command line client +* Significant speedup in iterating over query results +* Support for using data from a Trident database, the recommended data source for large + RDF graphs in VLog +* More features to control how Rulewerk imports RDF data using rulewerk-rdf module +* New class `LiteralQueryResultPrinter` for pretty-printing query results + +Other improvements: +* Improved serialization of knowledge bases (using namespaces) +* Simple (non-IRI, namespace-less) predicate names can now include - and _ +* Nulls in input data (aka "blank nodes") are now properly skolemized for VLog +* InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where + two or more edges are the same. + +Breaking changes: +* The `RdfModelConverter` class from the rdf package is no longer static (and has more options) +* The `Serializer` class in the core package has been replaced by a new implementation + with a completely different interface. +* The methods `getSerialization` that were present in most syntax objects have been removed. 
Use `toString()` instead for simple serializations, or invoke a custom Serializer. +* The `DataSource` interface requires a new method to be implemented. +* `@import`, `@import-relative`, and `@source` now treat relative paths as relative to the file they occur in, as opposed to the global working directory. + +Rulewerk v0.6.0 +--------------- + +Breaking changes: +* VLog4j is now called Rulewerk. Consequently, the groupId, artifact Ids, and package names + of the project have changed. +* In the examples package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no + longer exist. It can be replaced by + `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` +* The `FileDataSource` constructor and those of child classes (`CsvFileDataSource`, `RdfFileDataSource`) + now take the String path to a file instead of `File` object. +* The VLog backend has been moved to a new `rulewerk-vlog` module, + changing several import paths. `Reasoner.getInstance()` is + gone. Furthermore, `InMemoryDataSource` has become an abstract class, + use `VLogInMemoryDataSource` where applicable. + +New features: +* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` +* All inferred facts can be serialized to a file using `Reasoner.writeInferences()` +* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()` +* `Reasoner.getCorrectness()` returns the correctness result of the last reasoning task. +* Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` +* Rules files may import other rules files using `@import` and + `@import-relative`, where the latter resolves relative IRIs using + the current base IRI, unless the imported file explicitly specifies + a different one. +* Named nulls of the form `_:name` are now allowed during parsing (but + may not occur in rule bodies). They are renamed to assure that they + are distinct on a per-file level. +* The parser allows custom directives to be implemented, and a certain + set of delimiters allows for custom literal expressions. + +Other improvements: +* Prefix declarations are now kept as part of the Knowledge Base and + are used to abbreviate names when exporting inferences. + +Bugfixes: +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now + +VLog4j v0.5.0 +------------- + +Breaking changes: +* The data model for rules has been refined and changed: + * Instead of Constant, specific types of constants are used to capture abtract and data values + * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification + * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes + * Methods to access terms now use Java Streams and are unified across syntactic objects +* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()` + +New features: +* New module vlog4j-client provides a stand-alone command line client jar for VLog4j +* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki +* The parser behaviour for data source declarations and certain datatype literals can be customised. 
+ +Other improvements: +* Data model is better aligned with syntax supported by parser +* Java object Statements (rules, facts, datasource declarations) String representation is parseable +* OWL API dependency has been upgraded from 4.5.1 to latest (5.1.11) +* SL4J dependency has been upgraded from 1.7.10 to latest (1.7.28) +* Cobertura test coverage tool has been replaced by JaCoCo + +Bugfixes: +* Acyclicity checks work again without calling reason() first (issue #128) +* in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) +* in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) +* When parsing syntactic fragment such as Facts or Literals, the parser now enforces that all input is consumed. + +VLog4j v0.4.0 +------------- + +Breaking changes: +* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) +* The EdbIdbSeparation is obsolete and does no longer exist +* IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier +* A new interface Fact has replaced the overly general PositiveLiteral in many places + +New features: +* New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java +* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction) +* New InMemoryDataSource for efficient in-memory fact loading +* New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner +* Modifications to the knowledge base are taken into account by the reasoner +* New and updated example programs to illustrate use of syntax + +Other improvements: +* Query results now indicate their guaranteed correctness (example: answers can be incomplete when setting a timeout) +* Faster and more memory-efficient loading of facts +* Better error reporting; improved use of exceptions +* Better logging, especially on the INFO level +* Better code structure and testing + +Bugfixes: +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now + + +VLog4j v0.3.0 +------------- + +New features: +* Support for Graal data structures (conversion from Graal model to VLog model objects) +* Stratified negation: rule bodies are conjunctions of positive or negated literals +* SPARQL-based data sources: load remote data from SPARQL endpoints +* Acyclicity and cyclicity checks: JA, RJA, MFA, RMFA, RFC, as well as a generic method that checks whether given set or rules and fact predicates are acyclic, cyclic, or undetermined + +VLog4j v0.2.0 +------------- + +New features: +* supporting File data sources of N-Triples format (.nt file extension) +* supporting g-zipped data source files (.csv.gz, .nt.gz) + +VLog4j v0.1.0 +------------- + +Initial release. 
+ +New features: +* Essential data models for rules and facts, and essential reasoner functionality +* support for reading from RDF files +* support for converting rules from OWL ontology, loaded with the OWL API From 3a423b7d5932079b80109244d2fedd1de054258f Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 18:49:16 +0200 Subject: [PATCH 0819/1003] add unit tests for @load command completers --- .../shell/DefaultShellConfigurationTest.java | 72 ++++++++++++++++++- 1 file changed, 71 insertions(+), 1 deletion(-) diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java index 87f369cd9..83c8a5a2a 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java @@ -42,6 +42,7 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.LoadCommandInterpreter; public class DefaultShellConfigurationTest { @@ -86,7 +87,7 @@ public void buildCompleterHelp() { } @Test - public void buildCompleterLoad() { + public void buildCompleterLoad_emptyLine() { final ArrayList readWords = new ArrayList(); readWords.add("@load"); @@ -95,6 +96,75 @@ public void buildCompleterLoad() { assertFalse(candidates.isEmpty()); final String tempFolderName = this.folder.getRoot().getName(); assertTrue(candidates.contains(tempFolderName)); + + assertTrue(candidates.contains(LoadCommandInterpreter.TASK_OWL)); + assertTrue(candidates.contains(LoadCommandInterpreter.TASK_RDF)); + assertTrue(candidates.contains(LoadCommandInterpreter.TASK_RLS)); + } + + @Test + public void buildCompleterLoad_task_OWL() { + final ArrayList readWords = new ArrayList(); + readWords.add("@load"); + readWords.add(LoadCommandInterpreter.TASK_OWL); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + assertFalse(candidates.isEmpty()); + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS)); + } + + @Test + public void buildCompleterLoad_task_RDF() { + final ArrayList readWords = new ArrayList(); + readWords.add("@load"); + readWords.add(LoadCommandInterpreter.TASK_RDF); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + assertFalse(candidates.isEmpty()); + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS)); + } + + @Test + public void buildCompleterLoad_task_RLS() { + final ArrayList readWords = new ArrayList(); + readWords.add("@load"); + readWords.add(LoadCommandInterpreter.TASK_RLS); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + assertFalse(candidates.isEmpty()); + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + + 
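+		// Once a task keyword has been given, only file names should be suggested; the task keywords themselves must not be offered again.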
assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS)); + } + + @Test + public void buildCompleterLoad_file() { + final ArrayList readWords = new ArrayList(); + readWords.add("@load"); + final String tempFolderName = this.folder.getRoot().getName(); + readWords.add(tempFolderName); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS)); } private Set getCompleterCandidates(final ArrayList readWords, final String wordToComplete) { From a361d8b009444ac9940eebcedbf49eac91dcc22c Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 3 Sep 2020 18:55:08 +0200 Subject: [PATCH 0820/1003] Parser: Handle importing sibling files by bare name --- .../rulewerk/parser/ParserConfiguration.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 15d05f3f3..d094124f1 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -343,10 +343,16 @@ public String getImportBasePath() { /** * Set a new base path for file imports. * - * @param importBasePath path that relative imports will be resolved against. + * @param importBasePath path that relative imports will be + * resolved against. If null, default to current working + * directory. 
*/ public ParserConfiguration setImportBasePath(String importBasePath) { - this.importBasePath = importBasePath; + if (importBasePath != null) { + this.importBasePath = importBasePath; + } else { + this.importBasePath = System.getProperty("user.dir"); + } return this; } From 67fb40f2d21f9399f13875e84fe773bb33be441b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 19:07:48 +0200 Subject: [PATCH 0821/1003] added unit test for @export and @clear completers --- .../shell/DefaultShellConfigurationTest.java | 94 +++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java index 83c8a5a2a..a1f0bf7f5 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java @@ -42,6 +42,8 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.ClearCommandInterpreter; +import org.semanticweb.rulewerk.commands.ExportCommandInterpreter; import org.semanticweb.rulewerk.commands.LoadCommandInterpreter; public class DefaultShellConfigurationTest { @@ -167,6 +169,98 @@ public void buildCompleterLoad_file() { assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS)); } + @Test + public void buildCompleterExport_emptyLine() { + final ArrayList readWords = new ArrayList(); + readWords.add("@export"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + final HashSet expectedCandidates = new HashSet<>(); + expectedCandidates.add(ExportCommandInterpreter.TASK_INFERENCES); + expectedCandidates.add(ExportCommandInterpreter.TASK_KB); + + assertEquals(expectedCandidates, candidates); + } + + @Test + public void buildCompleterExport_task_INFERENCES() { + final ArrayList readWords = new ArrayList(); + readWords.add("@export"); + readWords.add(ExportCommandInterpreter.TASK_INFERENCES); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + + assertFalse(candidates.contains(ExportCommandInterpreter.TASK_INFERENCES)); + assertFalse(candidates.contains(ExportCommandInterpreter.TASK_KB)); + } + + @Test + public void buildCompleterExport_unknown() { + final ArrayList readWords = new ArrayList(); + readWords.add("@export"); + readWords.add("unknown"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + assertTrue(candidates.isEmpty()); + } + + @Test + public void buildCompleterExport_task_KB() { + final ArrayList readWords = new ArrayList(); + readWords.add("@export"); + readWords.add(ExportCommandInterpreter.TASK_KB); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + + assertFalse(candidates.contains(ExportCommandInterpreter.TASK_INFERENCES)); + assertFalse(candidates.contains(ExportCommandInterpreter.TASK_KB)); + } + + @Test + public void buildCompleterClear_emptyLine() { + final ArrayList readWords = new ArrayList(); + readWords.add("@clear"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + final HashSet 
expectedCandidates = new HashSet<>(); + expectedCandidates.add(ClearCommandInterpreter.TASK_ALL); + expectedCandidates.add(ClearCommandInterpreter.TASK_FACTS); + expectedCandidates.add(ClearCommandInterpreter.TASK_INFERENCES); + expectedCandidates.add(ClearCommandInterpreter.TASK_PREFIXES); + expectedCandidates.add(ClearCommandInterpreter.TASK_RULES); + expectedCandidates.add(ClearCommandInterpreter.TASK_SOURCES); + + assertEquals(expectedCandidates, candidates); + } + + @Test + public void buildCompleterClear_unknown() { + final ArrayList readWords = new ArrayList(); + readWords.add("@clear"); + readWords.add("unknown"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + assertTrue(candidates.isEmpty()); + } + + @Test + public void buildCompleterClear_task_ALL() { + final ArrayList readWords = new ArrayList(); + readWords.add("@clear"); + readWords.add(ClearCommandInterpreter.TASK_ALL); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + assertTrue(candidates.isEmpty()); + } + private Set getCompleterCandidates(final ArrayList readWords, final String wordToComplete) { final List candidates = new ArrayList<>(); From c087f05aba4d0a2e3fff79d2c63c66498f05b7f4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 3 Sep 2020 19:56:34 +0200 Subject: [PATCH 0822/1003] Core: Normalise paths for Trident data sources --- .../implementation/TridentDataSource.java | 15 ++++++++++++--- .../vlog/VLogDataSourceConfigurationVisitor.java | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java index 213c7df2f..182b04718 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; +import java.io.File; + /*- * #%L * Rulewerk Core Components @@ -46,16 +48,23 @@ public class TridentDataSource implements ReasonerDataSource { public static final String declarationPredicateName = "trident"; final String filePath; + final String fileName; public TridentDataSource(final String filePath) { Validate.notBlank(filePath, "Path to Trident database cannot be blank!"); - this.filePath = filePath; + this.filePath = filePath; // unmodified file path, necessary for correct serialisation + this.fileName = new File(filePath).getName(); } public String getPath() { return this.filePath; } + public String getName() { + return this.fileName; + } + + @Override public Fact getDeclarationFact() { Predicate predicate = Expressions.makePredicate(declarationPredicateName, 1); @@ -65,7 +74,7 @@ public Fact getDeclarationFact() { @Override public String toString() { - return "[TridentDataSource [tridentFile=" + this.filePath + "]"; + return "[TridentDataSource [tridentFile=" + this.fileName + "]"; } @Override @@ -90,7 +99,7 @@ public boolean equals(final Object obj) { return false; } final TridentDataSource other = (TridentDataSource) obj; - return this.filePath.equals(other.getPath()); + return this.fileName.equals(other.getName()); } } diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java 
b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java index c2bc52c08..2e4f250df 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java @@ -82,7 +82,7 @@ public void visit(SparqlQueryResultDataSource dataSource) { public void visit(TridentDataSource dataSource) { this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" // + TRIDENT_DATASOURCE_TYPE_CONFIG_VALUE + "\n" // - + "EDB%1$d_param0=" + dataSource.getPath() + "\n"; + + "EDB%1$d_param0=" + dataSource.getName() + "\n"; } @Override From b53d62c37c3aaef74ecb02928232151e3eb28651 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 21:34:25 +0200 Subject: [PATCH 0823/1003] update version to snapsot 0.8.0-SNAPSHOT --- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-commands/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 11 files changed, 11 insertions(+), 11 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 40e671d48..cd9c75339 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT coverage diff --git a/pom.xml b/pom.xml index 300fb51a4..9367c117b 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index f7b3e3a7c..695e9a6a4 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-client diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml index 599a526a5..773e18561 100644 --- a/rulewerk-commands/pom.xml +++ b/rulewerk-commands/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-commands diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index dc562d9ab..3ab864a66 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 739f580cb..341378aec 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index 34ef21c2d..13631b3cb 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-graal diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index 2b50c042d..be81cdf49 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index c88769034..e495e5247 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-parser diff --git a/rulewerk-rdf/pom.xml 
b/rulewerk-rdf/pom.xml index ffed1fc13..e64742507 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 53fc07b13..a098e8a17 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-vlog From d0243381f229b61ee728c03bfb276a527e8173eb Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 3 Sep 2020 22:15:14 +0200 Subject: [PATCH 0824/1003] Fix javadoc executable path on JDK >= 9 --- pom.xml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pom.xml b/pom.xml index 9367c117b..8a3d6d7b0 100644 --- a/pom.xml +++ b/pom.xml @@ -427,6 +427,9 @@ [9,) + + ${java.home}/bin/javadoc + From b6fc66d0fdaec2a358757913076047c37d72ba0e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Sep 2020 10:21:38 +0200 Subject: [PATCH 0825/1003] +nonabbr. RDF literal serialisations --- .../core/model/api/DatatypeConstant.java | 14 ++++++++++++++ .../implementation/DatatypeConstantImpl.java | 9 +++++++++ .../core/model/implementation/Serializer.java | 17 ++++++++++++++--- .../core/model/implementation/TermImplTest.java | 2 ++ 4 files changed, 39 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java index 0de18e509..c64955d35 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -58,4 +58,18 @@ default TermType getType() { */ String getLexicalValue(); + /** + * Returns a string representation of this value that conforms to RDF + * serialisation formats such as Turtle. Turtle supports some abbreviations for + * common types, e.g., by leaving the type away for xsd:string literals, which + * can be enabled or disabled through the functions parameter. 
+ * + * @param useAbbreviations if true, the result may use Turtle-style + * abbreviations to shorten the output where supported; + * otherwise the literal will always be serialised in + * full + * @return RDF-style string serialisation of the value + */ + String getRdfLiteralString(boolean useAbbreviations); + } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java index 8696190c7..f4810bee4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -58,6 +58,15 @@ public String getLexicalValue() { return this.lexicalValue; } + @Override + public String getRdfLiteralString(boolean useAbbreviations) { + if (useAbbreviations) { + return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstant(this)); + } else { + return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstantNoAbbreviations(this)); + } + } + @Override public String toString() { return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstant(this)); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 810b85a0e..741aba0a5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -427,12 +427,23 @@ public void writeDatatypeConstant(DatatypeConstant datatypeConstant) throws IOEx } else if (PrefixDeclarationRegistry.XSD_INTEGER.equals(datatypeConstant.getDatatype())) { writer.write(datatypeConstant.getLexicalValue()); } else { - writer.write(getQuotedString(datatypeConstant.getLexicalValue())); - writer.write("^^"); - writer.write(getIri(datatypeConstant.getDatatype())); + writeDatatypeConstantNoAbbreviations(datatypeConstant); } } + /** + * Writes a serialization of the given {@link DatatypeConstant} without using + * any Turtle-style abbreviations for common datatypes like string and int. + * + * @param datatypeConstant a {@link DatatypeConstant} + * @throws IOException + */ + public void writeDatatypeConstantNoAbbreviations(DatatypeConstant datatypeConstant) throws IOException { + writer.write(getQuotedString(datatypeConstant.getLexicalValue())); + writer.write("^^"); + writer.write(getIri(datatypeConstant.getDatatype())); + } + /** * Writes a serialization of the given {@link UniversalVariable}. 
* diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java index c8230303d..a2fb5e0a0 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java @@ -144,6 +144,8 @@ public void abstractConstantToStringTest() { public void datatypeConstantToStringTest() { DatatypeConstantImpl c = new DatatypeConstantImpl("c", PrefixDeclarationRegistry.XSD_STRING); assertEquals("\"c\"", c.toString()); + assertEquals("\"c\"", c.getRdfLiteralString(true)); + assertEquals("\"c\"^^<" + PrefixDeclarationRegistry.XSD_STRING + ">", c.getRdfLiteralString(false)); } @Test From d63cedaeef6c6fad4b410c831309a1bc9418151a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Sep 2020 10:22:21 +0200 Subject: [PATCH 0826/1003] Use expanded RDF serialisation for terms --- .../semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index 3e294f848..ea2f20526 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -58,7 +58,7 @@ public karmaresearch.vlog.Term visit(final AbstractConstant term) { */ @Override public karmaresearch.vlog.Term visit(final DatatypeConstant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getRdfLiteralString(false)); } /** From 89cd184c6cba3456370a2a61a176bb61bae39a07 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Sep 2020 10:26:51 +0200 Subject: [PATCH 0827/1003] fix problem with xsd:string in RDF --- RELEASE-NOTES.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 2b6ca3371..6ad151e9c 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,6 +1,12 @@ Rulewerk Release Notes ====================== +Rulewerk v0.8.0 +--------------- + +Bugfixes: +* Encoding of RDF strings corrected to make sure VLog succeeds joining on strings + Rulewerk v0.7.0 --------------- From 9685dec07a9d783e3040ab838f8fd2cf5d93bcb0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Sep 2020 10:54:55 +0200 Subject: [PATCH 0828/1003] New test for correct RDF string handling --- .../reasoner/vlog/VLogReasonerRdfInput.java | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java index c64e829ea..b5d068ab6 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java @@ -34,6 +34,7 @@ import org.mockito.internal.util.collections.Sets; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; +import 
org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; @@ -48,6 +49,9 @@ public class VLogReasonerRdfInput { private static final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(ternaryPredicate, Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), Expressions.makeUniversalVariable("o")); + private static final PositiveLiteral queryAtomString = Expressions.makePositiveLiteral(ternaryPredicate, + Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), + Expressions.makeDatatypeConstant("test string", PrefixDeclarationRegistry.XSD_STRING)); @SuppressWarnings("unchecked") private static final Set> expectedTernaryQueryResult = Sets.newSet( @@ -57,6 +61,11 @@ public class VLogReasonerRdfInput { Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), Expressions.makeAbstractConstant("http://example.org/q"), Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); + @SuppressWarnings("unchecked") + private static final Set> expectedTernaryQueryResultString = Sets + .newSet(Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/q"), + Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); @Ignore // TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation @@ -82,6 +91,12 @@ public void testLoadTernaryFactsFromRdfFile() throws IOException { FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt")); } + @Test + public void queryStringFromRdf_succeeds() throws IOException { + testQueryStringFromSingleRdfDataSource(new RdfFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt")); + } + @Test public void testLoadTernaryFactsFromRdfFileGz() throws IOException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( @@ -102,6 +117,20 @@ public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fil } } + public void testQueryStringFromSingleRdfDataSource(final FileDataSource fileDataSource) throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomString, true); + final Set> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + + assertEquals(expectedTernaryQueryResultString, queryResult); + } + } + @Test(expected = IOException.class) public void testLoadNonexistingRdfFile() throws IOException { final File nonexistingFile = new File("nonexistingFile.nt"); From e31140cf90f92cdcaae7a3d41dde2bd3e789e3c0 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:12:36 +0200 Subject: [PATCH 0829/1003] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 221f3a1b0..9d92b1b9d 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,7 @@ Development * Pull requests are welcome. * The master branch may require a development version of VLog. 
-Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). +Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds "build-vlog" and "local_builds" first). * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From cbd2bfd21b94e1d25e52e27b682e64464a3a6c48 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:29:45 +0200 Subject: [PATCH 0830/1003] Update README.md change order of items in "Development" section --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 9d92b1b9d..84e98ddd4 100644 --- a/README.md +++ b/README.md @@ -52,8 +52,9 @@ Development ----------- * Pull requests are welcome. +* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. + * The master branch may require a development version of VLog. Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds "build-vlog" and "local_builds" first). * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. -* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From 28deb877ef86f0952a185f79caafe7a631ee2a91 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:30:47 +0200 Subject: [PATCH 0831/1003] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 84e98ddd4..1d41c8e3e 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,6 @@ Development * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * The master branch may require a development version of VLog. -Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds "build-vlog" and "local_builds" first). +Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). 
* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From 2f93127323b6d7829d1ecab8918ffe47cbee9618 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:43:01 +0200 Subject: [PATCH 0832/1003] Update README.md vlog-java instead of vlog-base --- README.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 1d41c8e3e..c9f64266a 100644 --- a/README.md +++ b/README.md @@ -33,9 +33,9 @@ You need to use Java 1.8 or above. Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released packages use vlog-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use `vlog-java`, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: -* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog-base. +* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. * Run ```mvn install``` to test if the setup works @@ -54,7 +54,8 @@ Development * Pull requests are welcome. * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. -* The master branch may require a development version of VLog. -Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). +* The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog). +Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). 
This will compile and install `vlog-java`dependency with the current code in [VLog](https://github.com/karmaresearch/vlog) master branch. + * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From 0c31eac0f3967416bcd60660b89224e9c3f7bbe9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:46:28 +0200 Subject: [PATCH 0833/1003] Update README with references to VLog --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c9f64266a..3fe3b6fbf 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ You need to use Java 1.8 or above. Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released packages use `vlog-java`, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use `vlog-java`, which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. * Run ```mvn install``` to test if the setup works @@ -55,7 +55,7 @@ Development * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog). -Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code in [VLog](https://github.com/karmaresearch/vlog) master branch. +Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. 
* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From 400510ca21eef27c27a69b1fa757fcc713971104 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:51:54 +0200 Subject: [PATCH 0834/1003] Update README explain how to install javacc --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3fe3b6fbf..ea5f54327 100644 --- a/README.md +++ b/README.md @@ -57,5 +57,5 @@ Development * The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog). Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. -* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. +* Users of Eclipse should install the [JavaCC Eclipse Plug-in](https://marketplace.eclipse.org/content/javacc-eclipse-plug) to generate the parser sources. After [installing](https://marketplace.eclipse.org/content/javacc-eclipse-plug/help) the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. 
From fe0f86f6951b465b62fc113ce4ce665167e73561 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 25 Sep 2020 14:04:24 +0200 Subject: [PATCH 0835/1003] Core: Fix path handling for Trident data sources --- .../core/reasoner/implementation/TridentDataSource.java | 7 +++---- .../datasources/TridentDataSourceDeclarationHandler.java | 7 ++++++- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java index 182b04718..8f698e45b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -50,10 +50,10 @@ public class TridentDataSource implements ReasonerDataSource { final String filePath; final String fileName; - public TridentDataSource(final String filePath) { + public TridentDataSource(final String filePath) throws IOException { Validate.notBlank(filePath, "Path to Trident database cannot be blank!"); - this.filePath = filePath; // unmodified file path, necessary for correct serialisation - this.fileName = new File(filePath).getName(); + this.filePath = filePath; // unmodified file path, necessary for correct serialisation + this.fileName = new File(filePath).getCanonicalPath(); } public String getPath() { @@ -64,7 +64,6 @@ public String getName() { return this.fileName; } - @Override public Fact getDeclarationFact() { Predicate predicate = Expressions.makePredicate(declarationPredicateName, 1); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java index 6640a414e..7ec1627d4 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java @@ -20,6 +20,7 @@ * #L% */ +import java.io.IOException; import java.util.List; import org.semanticweb.rulewerk.core.model.api.DataSource; @@ -39,6 +40,10 @@ public DataSource handleDataSourceDeclaration(List terms, String importBas String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0), "path to Trident database", importBasePath); - return new TridentDataSource(fileName); + try { + return new TridentDataSource(fileName); + } catch (IOException e) { + throw new ParsingException("Could not use trident database \"" + fileName + "\": " + e.getMessage(), e); + } } } From 6989f010472d3030afa7624ff444a988b8841c62 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 25 Sep 2020 14:08:13 +0200 Subject: [PATCH 0836/1003] Update Release-Notes --- RELEASE-NOTES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 6ad151e9c..cb12aa85e 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -6,6 +6,7 @@ Rulewerk v0.8.0 Bugfixes: * Encoding of RDF strings corrected to make sure VLog succeeds joining on strings +* Fixed handling of trident databases that are not a direct child of the current working directory Rulewerk v0.7.0 --------------- From 0c982fb05f93cbb8d1d77289ce59d5e195caf8cc Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 
Sep 2020 15:13:04 +0200 Subject: [PATCH 0837/1003] Update README.md --- README.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index ea5f54327..ce411c63a 100644 --- a/README.md +++ b/README.md @@ -33,8 +33,8 @@ You need to use Java 1.8 or above. Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released packages use `vlog-java`, which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: - +The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows +* (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps. * Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. * Run ```mvn install``` to test if the setup works @@ -55,7 +55,9 @@ Development * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog). -Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. +Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. + + * Users of Eclipse should install the [JavaCC Eclipse Plug-in](https://marketplace.eclipse.org/content/javacc-eclipse-plug) to generate the parser sources. After [installing](https://marketplace.eclipse.org/content/javacc-eclipse-plug/help) the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". 
This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From a95098c8e18e12d9a7dd7911e77e2391c9089419 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 15:15:02 +0200 Subject: [PATCH 0838/1003] Update README.md --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index ce411c63a..902b79b8d 100644 --- a/README.md +++ b/README.md @@ -33,10 +33,10 @@ You need to use Java 1.8 or above. Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows +The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows * (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps. -* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. -* Run ```mvn install``` to test if the setup works +* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. +* Run ```mvn install``` to test if the setup works From 969632be31ab44787890f976739063f5bfd02cbd Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 15:24:11 +0200 Subject: [PATCH 0839/1003] Update README.md --- README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 902b79b8d..c01e8d5ff 100644 --- a/README.md +++ b/README.md @@ -33,8 +33,9 @@ You need to use Java 1.8 or above. Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. 
-The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows +The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps. +* Delete (if existing) previous local builds (`local_builds` directory) * Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. * Run ```mvn install``` to test if the setup works @@ -55,7 +56,7 @@ Development * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog). -Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. +Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to create and install it on your machine. This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. From 7fdfcb92ede15b5919eaafcf6ebf852948ead166 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 15:24:55 +0200 Subject: [PATCH 0840/1003] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c01e8d5ff..4076efe3c 100644 --- a/README.md +++ b/README.md @@ -35,9 +35,9 @@ You need to use Java 1.8 or above. Available source modules include: The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). 
In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps. -* Delete (if existing) previous local builds (`local_builds` directory) +* Delete (if existing) previous local builds (`local_builds` directory). * Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. -* Run ```mvn install``` to test if the setup works +* Run ```mvn install``` to test if the setup works. From f71880eea9213dc4972a3d256741fa56aa62fb1b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Sep 2020 16:29:27 +0200 Subject: [PATCH 0841/1003] fix inconsistent conversion of string constants --- .../rulewerk/reasoner/vlog/TermToVLogConverter.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index ea2f20526..ac4cbfae8 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -58,7 +58,7 @@ public karmaresearch.vlog.Term visit(final AbstractConstant term) { */ @Override public karmaresearch.vlog.Term visit(final DatatypeConstant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getRdfLiteralString(false)); + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, getVLogNameForConstant(term)); } /** @@ -78,11 +78,10 @@ public karmaresearch.vlog.Term visit(final LanguageStringConstant term) { * @return VLog constant string */ public static String getVLogNameForConstant(final Constant constant) { - final String constantName = constant.getName(); if (constant.getType() == TermType.ABSTRACT_CONSTANT) { - return getVLogNameForIRI(constantName); + return getVLogNameForIRI(constant.getName()); } else { // datatype literal - return constantName; + return ((DatatypeConstant)constant).getRdfLiteralString(false); } } From 6fb28d0530d887dbbdac47751450d53aef8d2e51 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 1 Oct 2020 11:13:34 +0200 Subject: [PATCH 0842/1003] Fixed exception when lang-strings are sent to VLog --- RELEASE-NOTES.md | 1 + .../rulewerk/reasoner/vlog/TermToVLogConverter.java | 7 ++++++- .../rulewerk/reasoner/vlog/ModelToVLogConverterTest.java | 1 + 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index cb12aa85e..faf83bafe 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -7,6 +7,7 @@ Rulewerk v0.8.0 Bugfixes: * Encoding of RDF strings corrected to make sure VLog succeeds joining on strings * Fixed handling of trident databases that are not a direct child of the current working directory +* Fixed encoding of language-tagged strings that are used in Rulewerk facts, which had caused an exception Rulewerk v0.7.0 --------------- diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java 
b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index ac4cbfae8..ee43f3068 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -1,5 +1,6 @@ package org.semanticweb.rulewerk.reasoner.vlog; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; @@ -80,8 +81,12 @@ public karmaresearch.vlog.Term visit(final LanguageStringConstant term) { public static String getVLogNameForConstant(final Constant constant) { if (constant.getType() == TermType.ABSTRACT_CONSTANT) { return getVLogNameForIRI(constant.getName()); - } else { // datatype literal + } else if (constant.getType() == TermType.DATATYPE_CONSTANT) { return ((DatatypeConstant)constant).getRdfLiteralString(false); + } else if (constant.getType() == TermType.LANGSTRING_CONSTANT) { + return constant.getName(); + } else { + throw new RulewerkRuntimeException("Unexpected term type: " + constant.getType()); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java index 70c188ffc..c3a2f3c99 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java @@ -111,6 +111,7 @@ public void testToVLogTermLanguageStringConstant() { assertEquals(expectedVLogTerm, vLogTerm); assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); } @Test From 43b4be5387cfae94d490ae284dc3380109c99e75 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Oct 2020 08:33:45 +0000 Subject: [PATCH 0843/1003] Bump junit from 4.12 to 4.13.1 Bumps [junit](https://github.com/junit-team/junit4) from 4.12 to 4.13.1. - [Release notes](https://github.com/junit-team/junit4/releases) - [Changelog](https://github.com/junit-team/junit4/blob/main/doc/ReleaseNotes4.12.md) - [Commits](https://github.com/junit-team/junit4/compare/r4.12...r4.13.1) Signed-off-by: dependabot[bot] --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8a3d6d7b0..10123a1c3 100644 --- a/pom.xml +++ b/pom.xml @@ -76,7 +76,7 @@ UTF-8 - 4.12 + 4.13.1 2.28.2 1.7.28 3.9 From d97bd54e5abe293af688ffc81ce786dd3e63e873 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 8 Jan 2021 12:26:29 +0100 Subject: [PATCH 0844/1003] Core: Fix uninitialised base IRI falsely treated as set in unresolve When unresolving relative IRIs, an uninitialised base IRI would falsely be treated as set, resulting in an exception. Fixes #197. 
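For illustration, a minimal sketch of the affected call (not part of this patch): it uses the MergingPrefixDeclarationRegistry and unresolveAbsoluteIri(String, boolean) shown in the diffs below, assumes that class has a public no-argument constructor, and uses a purely illustrative relative IRI string.

```
// Sketch only: unresolving a relative IRI while no base IRI is declared.
// Assumptions: MergingPrefixDeclarationRegistry (rulewerk-core, package taken from the
// test location below) has a public no-arg constructor; "relativeIri" is illustrative.
import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry;

public class UnresolveWithoutBaseSketch {
	public static void main(String[] args) {
		MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry();
		prefixDeclarations.clear(); // afterwards, neither prefixes nor a base IRI are declared

		// Before this fix, the uninitialised (null) base IRI was treated as if a base had
		// been declared, so this call threw a RulewerkRuntimeException; with the fix, the
		// relative IRI is expected to be returned unchanged.
		String unresolved = prefixDeclarations.unresolveAbsoluteIri("relativeIri", true);
		System.out.println(unresolved); // expected output: relativeIri
	}
}
```

The test unresolveAbsoluteIri_relativeIriAfterClear_succeeds added below exercises the same case.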
--- .../AbstractPrefixDeclarationRegistry.java | 6 +++--- .../MergingPrefixDeclarationRegistryTest.java | 14 ++++++++++---- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 851dbe8fb..0d8de1736 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -115,7 +115,7 @@ public String unresolveAbsoluteIri(String iri, boolean addIriBrackets) { if (addIriBrackets) { if (!iri.contains(":") && iri.matches(REGEXP_LOCNAME)) { shortestIri = iri; - if (!PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri)) { + if (baseIri != null && !PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri)) { throw new RulewerkRuntimeException("Relative IRIs cannot be serialized when a base is declared."); } } else { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java index 6ea303c2c..946d2bf17 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -104,7 +104,7 @@ public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationExce assertEquals(BASE, prefixDeclarations.getPrefixIri("eg:")); assertEquals(2, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); } - + @Test public void clearPrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.setPrefixIri("eg:", BASE); @@ -112,7 +112,7 @@ public void clearPrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.clear(); assertEquals(0, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); } - + @Test public void setPrefixIri_setSamePrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.setPrefixIri("eg:", BASE); @@ -229,4 +229,10 @@ public void unresolveAbsoluteIri_absoluteIriMergedOntoEmptyBase_staysAbsolute() assertNotEquals(RELATIVE, resolvedIri); assertEquals("rw_gen0:" + RELATIVE, resolvedIri); } + + @Test + public void unresolveAbsoluteIri_relativeIriAfterClear_succeeds() throws PrefixDeclarationException { + prefixDeclarations.clear(); + assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(RELATIVE, true)); + } } From 1965dd7740296256c9c7e6d08bb264b5202964e0 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 13 Jan 2021 18:56:01 +0100 Subject: [PATCH 0845/1003] Core: Fix a potential race condition with file data sources `FileDataSource` objects have a list of allowed extensions that is validated on creation. Up to now, this list was processed in an unspecified order, which might result in the wrong extension being picked should one be a suffix of another (this is not the case for any of our data sources). Instead of processing the extensions in parallel, we now use a sequential stream, which avoids this problem, and, as a side effect, fixes #198. --- .../core/reasoner/implementation/FileDataSource.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index 68cbf2f68..1fbb8ee2a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -52,7 +52,9 @@ public abstract class FileDataSource implements ReasonerDataSource { * * @param filePath path to a file that will serve as storage for fact * terms. - * @param possibleExtensions a list of extensions that the files could have + * @param possibleExtensions a list of extensions that the files could have. + * Extensions are tried in the given order, no extension + * in the list can be a suffix of a later extension. * @throws IOException if the path of the given {@code file} is * invalid. * @throws IllegalArgumentException if the extension of the given {@code file} @@ -70,7 +72,9 @@ public FileDataSource(final String filePath, final Iterable possibleExte } private String getValidExtension(final String fileName, final Iterable possibleExtensions) { - final Stream extensionsStream = StreamSupport.stream(possibleExtensions.spliterator(), true); + // use a sequential stream here to avoid a potential race + // condition with extensions that are suffixes of one another. 
+ final Stream extensionsStream = StreamSupport.stream(possibleExtensions.spliterator(), false); final Optional potentialExtension = extensionsStream.filter(fileName::endsWith).findFirst(); if (!potentialExtension.isPresent()) { @@ -127,7 +131,7 @@ public Fact getDeclarationFact() { /** * Returns the name of the predicate that is used to define a declaration of * this data source. - * + * * @return */ abstract String getDeclarationPredicateName(); From 9f9c0e06734b6b27bde82e9aa35bbf65bb640a35 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 05:17:27 +0100 Subject: [PATCH 0846/1003] Migrate CI pipeline --- .github/workflows/test.yml | 17 +++++++++++++++++ .gitignore | 1 + shell.nix | 13 +++++++++++++ vlog/default.nix | 24 +++++++++++++++++++++++ vlog/kognac-lz4.patch | 38 +++++++++++++++++++++++++++++++++++++ vlog/kognac.nix | 32 +++++++++++++++++++++++++++++++ vlog/trident-lz4.patch | 27 ++++++++++++++++++++++++++ vlog/trident.nix | 39 ++++++++++++++++++++++++++++++++++++++ vlog/vlog-lz4.patch | 27 ++++++++++++++++++++++++++ vlog/vlog.nix | 38 +++++++++++++++++++++++++++++++++++++ 10 files changed, 256 insertions(+) create mode 100644 .github/workflows/test.yml create mode 100644 shell.nix create mode 100644 vlog/default.nix create mode 100644 vlog/kognac-lz4.patch create mode 100644 vlog/kognac.nix create mode 100644 vlog/trident-lz4.patch create mode 100644 vlog/trident.nix create mode 100644 vlog/vlog-lz4.patch create mode 100644 vlog/vlog.nix diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..40d8da784 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,17 @@ +name: "Test" +on: + pull_request: + push: +jobs: + tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.3.4 + - uses: cachix/install-nix-action@v12 + with: + nix_path: nixpkgs=channel:nixos-unstable + - uses: cachix/cachix-action@v8 + with: + name: knowsys + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + - run: nix-shell --run "mvn test" diff --git a/.gitignore b/.gitignore index b052b97ca..94a267652 100644 --- a/.gitignore +++ b/.gitignore @@ -56,3 +56,4 @@ rulewerk-rdf/src/main/data/output/* rulewerk-vlog/src/test/data/output/* /build-vlog/vlog/ /TAGS +/vlog/result* diff --git a/shell.nix b/shell.nix new file mode 100644 index 000000000..79e46b9c1 --- /dev/null +++ b/shell.nix @@ -0,0 +1,13 @@ +let pkgs = import {}; + + maven = pkgs.maven; + dependencies = import ./vlog; + deps = dependencies.deps // { maven = maven; }; +in pkgs.mkShell { + buildInputsNative = [ maven deps.jdk dependencies.vlog ]; + shellHook = '' + ln -sf ${dependencies.vlog.dev}/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar + mvn initialize -Pdevelopment + mvn install -DskipTests + ''; +} diff --git a/vlog/default.nix b/vlog/default.nix new file mode 100644 index 000000000..3b68e8943 --- /dev/null +++ b/vlog/default.nix @@ -0,0 +1,24 @@ +let pkgs = import {}; + + args = { pkgs = pkgs; + lz4 = pkgs.lz4.override { enableStatic = true; }; + git = pkgs.git; + jdk = pkgs.jdk8_headless; + curl = pkgs.curl; + zlib = pkgs.zlib; + cmake = pkgs.cmake; + cacert = pkgs.cacert; + sparsehash = pkgs.sparsehash; + }; + + kognac = import ./kognac.nix args; + trident = import ./trident.nix (args // { inherit kognac; }); + vlog = import ./vlog.nix (args // { inherit kognac; inherit trident; }); + + deps = builtins.removeAttrs args [ "pkgs" ]; +in +{ inherit vlog; + inherit trident; + inherit kognac; + inherit deps; +} diff --git a/vlog/kognac-lz4.patch b/vlog/kognac-lz4.patch new 
file mode 100644 index 000000000..0bd93cde3 --- /dev/null +++ b/vlog/kognac-lz4.patch @@ -0,0 +1,38 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 3a24e70..3079812 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -97,30 +97,9 @@ ENDIF() + + #LZ4 + # we need it statically included, so download it, not only if it cannot be found! +-# find_library(lz4 lz4) +-# find_path (lz4h lz4.h) +-# IF (${lz4h} STREQUAL "lz4h-NOTFOUND") +-# message("Could not find LZ4. I'm going to download it from the GIT repository ...") +- message("Downloading lz4, static version required") +- ExternalProject_Add(git-lz4 +- DOWNLOAD_COMMAND git clone https://github.com/Cyan4973/lz4.git +- DOWNLOAD_DIR external +- SOURCE_DIR external/lz4/ +- CONFIGURE_COMMAND "" +- BUILD_IN_SOURCE 1 +- BUILD_COMMAND make -C lib lib MOREFLAGS=-fPIC +- INSTALL_COMMAND "" +- ) +- ExternalProject_Get_Property(git-lz4 SOURCE_DIR) +- include_directories(${SOURCE_DIR}/lib/) +- add_library(lz4 STATIC IMPORTED) +- set_property(TARGET lz4 PROPERTY IMPORTED_LOCATION ${CMAKE_BINARY_DIR}/external/lz4/lib/${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) +- add_dependencies(lz4 git-lz4) +- add_dependencies(kognac-o lz4) +-#ELSE() +-# include_directories(lz4h) +-#message("Found LZ4, lz4=${lz4}") +-#ENDIF() ++find_library(lz4 ${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) ++find_path (lz4h lz4.h) ++include_directories(lz4h) + + #standard include + include_directories(include/) diff --git a/vlog/kognac.nix b/vlog/kognac.nix new file mode 100644 index 000000000..b239aca66 --- /dev/null +++ b/vlog/kognac.nix @@ -0,0 +1,32 @@ +{ pkgs, lz4, git, zlib, cmake, cacert, sparsehash, ...}: +pkgs.stdenv.mkDerivation { + name = "kognac-unstable-2020-12-01"; + src = pkgs.fetchgit { + url = "git://github.com/karmaresearch/kognac"; + rev = "8430b081f8d76b11fa6858f3ec31a9ea5a5cf6a9"; + sha256 = "0mhmidbmcwql5h2qjfz3yvfhp79farx5j3cbdpisimk1zmwlzxjf"; + }; + + buildInputs = [ zlib sparsehash lz4 ]; + nativeBuildInputs = [ cmake git cacert ]; + outputs = [ "out" "lib" "dev" ]; + + cmakeFlags = [ "-DCMAKE_CXX_FLAGS=-w" ]; + patches = [ ./kognac-lz4.patch ]; + + installPhase = '' + mkdir -p $out + cp ./kognac_exec $out/ + + mkdir -p $lib + cp ./libkognac-core.so $lib/ + + mkdir -p $dev + cp -R $src/include/kognac/ $dev/ + cp -R $src/include/zstr/ $dev/ + ''; + + postFixup = '' + patchelf --set-rpath $lib:$(patchelf --print-rpath $out/kognac_exec) $out/kognac_exec + ''; +} diff --git a/vlog/trident-lz4.patch b/vlog/trident-lz4.patch new file mode 100644 index 000000000..d0b97f10f --- /dev/null +++ b/vlog/trident-lz4.patch @@ -0,0 +1,27 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 439b296..41dfa66 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -121,19 +121,9 @@ IF (${sparsehash} STREQUAL "sparsehash-NOTFOUND") + ENDIF() + + #LZ4 +-# find_library(lz4 lz4) +-# find_path(lz4h lz4.h) +-# IF (${lz4h} STREQUAL "lz4h-NOTFOUND") +-# message("Could not find LZ4. 
I'm going to use the version from kognac") +- message("I'm going to use LZ4 the version from kognac") +- include_directories(${KOGNAC_LIB}/external/lz4/lib/) +- LINK_DIRECTORIES(${KOGNAC_LIB}/external/lz4/lib) +- add_library(lz4 STATIC IMPORTED) +- set_property(TARGET lz4 PROPERTY IMPORTED_LOCATION ${KOGNAC_LIB}/external/lz4/lib/${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) +-# ELSE() +-# include_directories(lz4h) +-# message("-- Found LZ4") +-# ENDIF() ++find_library(lz4 ${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) ++find_path(lz4h lz4.h) ++include_directories(lz4h) + + #LZ4 + # find_library(lz4 lz4) diff --git a/vlog/trident.nix b/vlog/trident.nix new file mode 100644 index 000000000..c7c222e51 --- /dev/null +++ b/vlog/trident.nix @@ -0,0 +1,39 @@ +{ pkgs, lz4, git, zlib, cmake, cacert, sparsehash, kognac, ... }: +pkgs.stdenv.mkDerivation { + name = "trident-unstable-2021-02-05"; + src = pkgs.fetchgit { + url = "git://github.com/karmaresearch/trident"; + rev = "53630ea83460b5e78851b753f245efaefbcaa57f"; + sha256 = "1irjdzjxzwakgalliry23vcl5iqf0w5bm82wra91mlyqmgirnk2x"; + }; + + buildInputs = [ zlib sparsehash lz4 ]; + nativeBuildInputs = [ cmake git cacert ]; + outputs = [ "out" "lib" "dev" ]; + + cmakeFlags = [ "-DSPARQL=1" + "-DCMAKE_CXX_FLAGS=-w" + "-DKOGNAC_LIB=${kognac.lib}" + "-DKOGNAC_INC=${kognac.dev}" + ]; + patches = [ ./trident-lz4.patch ]; + + installPhase = '' + mkdir -p $out + cp ./trident $out/ + + mkdir -p $lib/ + cp ./libtrident-core.so $lib/ + cp ./libtrident-sparql.so $lib/ + + mkdir -p $dev/ + cp -R $src/include/trident $dev/ + cp -R $src/include/layers $dev/ + cp -R $src/rdf3x/include $dev/ + ''; + + postFixup = '' + patchelf --set-rpath $lib:$(patchelf --print-rpath $out/trident) $out/trident + patchelf --set-rpath $lib:$(patchelf --print-rpath $lib/libtrident-sparql.so) $lib/libtrident-sparql.so + ''; +} diff --git a/vlog/vlog-lz4.patch b/vlog/vlog-lz4.patch new file mode 100644 index 000000000..0a628f5fb --- /dev/null +++ b/vlog/vlog-lz4.patch @@ -0,0 +1,27 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index b72b116..241a32d 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -120,19 +120,9 @@ IF (${sparsehash} STREQUAL "sparsehash-NOTFOUND") + ENDIF() + + #LZ4 +-# find_library(lz4 lz4) +-# find_path(lz4h lz4.h) +-# IF (${lz4h} STREQUAL "lz4h-NOTFOUND") +-# message("Could not find LZ4. I'm going to use the version from kognac") +- message("I'm going to use LZ4 the version from kognac") +- include_directories(${KOGNAC_LIB}/external/lz4/lib/) +- LINK_DIRECTORIES(${KOGNAC_LIB}/external/lz4/lib) +- add_library(lz4 STATIC IMPORTED) +- set_property(TARGET lz4 PROPERTY IMPORTED_LOCATION ${KOGNAC_LIB}/external/lz4/lib/${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) +-# ELSE() +-# include_directories(lz4h) +-# message("-- Found LZ4") +-# ENDIF() ++find_library(lz4 ${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) ++find_path(lz4h lz4.h) ++include_directories(lz4h) + + IF (JAVA) + find_package(Java REQUIRED) diff --git a/vlog/vlog.nix b/vlog/vlog.nix new file mode 100644 index 000000000..1dd186c47 --- /dev/null +++ b/vlog/vlog.nix @@ -0,0 +1,38 @@ +{ pkgs, lz4, git, jdk, curl, zlib, cmake, cacert, sparsehash, kognac, trident, ... 
}: +pkgs.stdenv.mkDerivation { + name = "vlog"; + src = pkgs.fetchgit { + url = "git://github.com/karmaresearch/vlog"; + rev = "c20fa48fc284b333ce03e63ca3ad97dc51701542"; + sha256 = "0y1zv4bwb84rv09ihc8jc11hxxffrspk8v01s28cv2nymg2306q4"; + }; + + buildInputs = [ kognac trident sparsehash jdk curl lz4 ]; + nativeBuildInputs = [ cmake git cacert ]; + outputs = [ "out" "lib" "dev" ]; + + cmakeFlags = [ "-DJAVA=1" + "-DSPARQL=1" + "-DCMAKE_CXX_FLAGS=-w" + "-DKOGNAC_LIB=${kognac.lib}" + "-DKOGNAC_INC=${kognac.dev}" + "-DTRIDENT_LIB=${trident.lib}" + "-DTRIDENT_INC=${trident.dev}" + ]; + patches = [ ./vlog-lz4.patch ]; + + postInstall = '' + mkdir -p $out + cp ./vlog $out/ + + mkdir -p $lib + cp ./libvlog-core.so $lib/ + + mkdir -p $dev + cp ./jvlog.jar $dev/ + ''; + + postFixup = '' + patchelf --set-rpath $lib:$(patchelf --print-rpath $out/vlog) $out/vlog + ''; +} From 5bc28147fad146ca056cfb5e08b3912d4f881661 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 05:27:37 +0100 Subject: [PATCH 0847/1003] Ensure target directory for local jvlog exists --- .gitignore | 3 +++ shell.nix | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 94a267652..1def40cfa 100644 --- a/.gitignore +++ b/.gitignore @@ -29,6 +29,9 @@ nbactions.xml out/ target/ +# local jvlog +/rulewerk-vlog/lib/ + # Don't apply the above to src/ where Java requires # subdirectories named according to package names. # We do not want to forbid things like "dumpfiles" in diff --git a/shell.nix b/shell.nix index 79e46b9c1..831ce632a 100644 --- a/shell.nix +++ b/shell.nix @@ -5,7 +5,8 @@ let pkgs = import {}; deps = dependencies.deps // { maven = maven; }; in pkgs.mkShell { buildInputsNative = [ maven deps.jdk dependencies.vlog ]; - shellHook = '' + shellHook = ''' + mkdir -p rulewerk-vlog/lib/ ln -sf ${dependencies.vlog.dev}/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar mvn initialize -Pdevelopment mvn install -DskipTests From 443fd118bece226720d0a2b2692c0dce5c8183c5 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 05:40:42 +0100 Subject: [PATCH 0848/1003] Update badge & migrate coverage --- .github/workflows/test.yml | 5 ++++- README.md | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 40d8da784..1f2450b58 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -14,4 +14,7 @@ jobs: with: name: knowsys authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - - run: nix-shell --run "mvn test" + - run: nix-shell --run "mvn test jacoco:report" + - uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/README.md b/README.md index 4076efe3c..19e7d43f5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ Rulewerk ====== -[![Build Status](https://travis-ci.org/knowsys/rulewerk.png?branch=master)](https://travis-ci.org/knowsys/rulewerk) +[![Build Status](https://github.com/knowsys/rulewerk/workflows/test/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:test) [![Coverage Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22) From 04b2b48a6867bb5efc4c2efbcbc2dca0cc52a85a 
Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 05:54:35 +0100 Subject: [PATCH 0849/1003] Try to fix coveralls reporting --- .github/workflows/test.yml | 10 ++++++---- shell.nix | 4 ++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1f2450b58..2230a09cb 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -14,7 +14,9 @@ jobs: with: name: knowsys authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - - run: nix-shell --run "mvn test jacoco:report" - - uses: coverallsapp/github-action@master - with: - github-token: ${{ secrets.GITHUB_TOKEN }} + - env: + BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} + run: echo "::set-env name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" + - run: | + echo "::set-env name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" + - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceName=github -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" diff --git a/shell.nix b/shell.nix index 831ce632a..959415135 100644 --- a/shell.nix +++ b/shell.nix @@ -8,7 +8,7 @@ in pkgs.mkShell { shellHook = ''' mkdir -p rulewerk-vlog/lib/ ln -sf ${dependencies.vlog.dev}/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar - mvn initialize -Pdevelopment - mvn install -DskipTests + mvn --no-transfer-progress initialize -Pdevelopment + mvn --no-transfer-progress install -DskipTests ''; } From 3a7ac99721722ec2c6749793aeae9a18cce25f57 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 06:00:55 +0100 Subject: [PATCH 0850/1003] Use ENV files instead of deprecated ::set-env --- .github/workflows/{test.yml => tests.yml} | 7 +++---- README.md | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) rename .github/workflows/{test.yml => tests.yml} (77%) diff --git a/.github/workflows/test.yml b/.github/workflows/tests.yml similarity index 77% rename from .github/workflows/test.yml rename to .github/workflows/tests.yml index 2230a09cb..7dc431f49 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/tests.yml @@ -1,4 +1,4 @@ -name: "Test" +name: "Tests" on: pull_request: push: @@ -16,7 +16,6 @@ jobs: authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - env: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} - run: echo "::set-env name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" - - run: | - echo "::set-env name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" + run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV + - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceName=github -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" diff --git a/README.md b/README.md index 19e7d43f5..736363c96 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ Rulewerk ====== -[![Build Status](https://github.com/knowsys/rulewerk/workflows/test/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:test) +[![Build 
Status](https://github.com/knowsys/rulewerk/workflows/Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Tests) [![Coverage Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22) From c7bd8f68068b30ff29f404f1fc2eb00c2e82b000 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 06:04:58 +0100 Subject: [PATCH 0851/1003] Disable transfer progress for coveralls invocation --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7dc431f49..46a511af6 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -18,4 +18,4 @@ jobs: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceName=github -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" + - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn --no-transfer-progress coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceName=github -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" From 4635e517a619da6b8e5d725206d2c2d0b4ff0639 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 06:07:53 +0100 Subject: [PATCH 0852/1003] Be more generous about ignoring generated code wrt. coverage --- pom.xml | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 10123a1c3..1f9d8b879 100644 --- a/pom.xml +++ b/pom.xml @@ -298,6 +298,18 @@ not try to aggregate this into the final coverage report, since we want to control aggregation ourselves. --> ${project.reporting.outputDirectory}/jacoco-ut + + + **/javacc/JavaCCParser* + **/javacc/JavaCCParserConstants* + **/javacc/JavaCCParserTokenManager* + **/javacc/JavaCharStream* + **/javacc/ParseException* + **/javacc/SimpleCharStream* + **/javacc/Token* + **/javacc/TokenMgrError* +
    @@ -305,14 +317,14 @@ - **/javacc/JavaCCParser.class - **/javacc/JavaCCParserConstants.class - **/javacc/JavaCCParserTokenManager.class - **/javacc/JavaCharStream.class - **/javacc/ParseException.class - **/javacc/SimpleCharStream.class - **/javacc/Token.class - **/javacc/TokenMgrError.class + **/javacc/JavaCCParser* + **/javacc/JavaCCParserConstants* + **/javacc/JavaCCParserTokenManager* + **/javacc/JavaCharStream* + **/javacc/ParseException* + **/javacc/SimpleCharStream* + **/javacc/Token* + **/javacc/TokenMgrError*
    From e67b722d89e9c06b9ff2d5fdb4e4f7f6f0a2676e Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 06:34:48 +0100 Subject: [PATCH 0853/1003] Fix coveralls submission --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 46a511af6..a86878ee9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -18,4 +18,4 @@ jobs: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn --no-transfer-progress coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceName=github -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" + - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn --no-transfer-progress coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" From 924572b536b85389a5f5762c0a059f894a8e52ed Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 18 Feb 2021 21:20:57 +0100 Subject: [PATCH 0854/1003] Link to CI documentation in README --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 736363c96..90b75ce8f 100644 --- a/README.md +++ b/README.md @@ -62,3 +62,4 @@ Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to cr * Users of Eclipse should install the [JavaCC Eclipse Plug-in](https://marketplace.eclipse.org/content/javacc-eclipse-plug) to generate the parser sources. After [installing](https://marketplace.eclipse.org/content/javacc-eclipse-plug/help) the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. +* The CI setup is [documented here](https://github.com/knowsys/rulewerk/wiki/CI-Setup). \ No newline at end of file From 17a3a1a520ddf41a8c9a522060d3983b802c0bf9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 15 Mar 2021 18:54:22 +0100 Subject: [PATCH 0855/1003] update README with Matrix Element support room link --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 90b75ce8f..d7caf03e6 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,7 @@ Documentation * The GitHub project **[Rulewerk Example](https://github.com/knowsys/rulewerk-example)** shows how to use Rulewerk in own Maven projects and can be used as a skeleton for own projects * [JavaDoc](https://knowsys.github.io/rulewerk/) is available online and through the Maven packages. 
* A Rulewerk [Wiki](https://github.com/knowsys/rulewerk/wiki) is available online, with detailed information about rulewerk usage, the supported rule language [examples](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-by-examples) and [grammar](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar), and related publications. +* You can contact developers and other users about usage and/or development on our [support channel](https://matrix.to/#/#rulewerk-support:tu-dresden.de). Development ----------- @@ -62,4 +63,4 @@ Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to cr * Users of Eclipse should install the [JavaCC Eclipse Plug-in](https://marketplace.eclipse.org/content/javacc-eclipse-plug) to generate the parser sources. After [installing](https://marketplace.eclipse.org/content/javacc-eclipse-plug/help) the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. -* The CI setup is [documented here](https://github.com/knowsys/rulewerk/wiki/CI-Setup). \ No newline at end of file +* The CI setup is [documented here](https://github.com/knowsys/rulewerk/wiki/CI-Setup). From af27709c9624328eb2321e78f97ce0f2dad4e269 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 25 Mar 2021 11:46:20 +0100 Subject: [PATCH 0856/1003] correct javadoc for Reasoner#writeInferences(String) --- .../java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 3f759f573..d94ea7128 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -176,7 +176,7 @@ default Stream getInferences() { * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException * @throws FileNotFoundException - * @deprecated Use {@link KnowledgeBase#writeInferences(Writer)} instead. The + * @deprecated Use {@link Reasoner#writeInferences(Writer)} instead. The
*/ @Deprecated From 9a659c90daefb05730bd9cb82df0ccc4a6dcccee Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 29 Mar 2021 09:45:24 +0200 Subject: [PATCH 0857/1003] add dependency to parser; use autoformating --- rulewerk-vlog/pom.xml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index a098e8a17..54c6a8094 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -27,6 +27,11 @@ rulewerk-core ${project.version} + + ${project.groupId} + rulewerk-parser + ${project.version} + ${project.groupId} @@ -38,8 +43,8 @@ development - From 11ea039f2397ebb9bf5d65705b9e9da34d3c900a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 29 Mar 2021 09:46:21 +0200 Subject: [PATCH 0858/1003] add issue61 --- .../reasoner/vlog/issues/Issue61.java | 71 +++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java new file mode 100644 index 000000000..d192b46a1 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java @@ -0,0 +1,71 @@ +package org.semanticweb.rulewerk.reasoner.vlog.issues; + +import static org.junit.Assert.assertEquals; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class Issue61 { + + @Test + public void part01() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "p(a)."); + RuleParser.parseInto(kb, "q(?X,!Y,!Z) :- p(?X) ."); + RuleParser.parseInto(kb, "q(?X,!Y,!Y) :- p(?X) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + + assertEquals(2,reasoner.countQueryAnswers(query,true).getCount()); + reasoner.close(); + } + + @Test + public void part02() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "p(a)."); + RuleParser.parseInto(kb, "q(?X,!Y,!Y) :- p(?X) ."); + RuleParser.parseInto(kb, "q(?X,!Y,!Z) :- p(?X) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + + assertEquals(1,reasoner.countQueryAnswers(query,true).getCount()); + reasoner.close(); + } + +} From 6482f2dbb35d3d7eabb9265d74c23e7cd2570419 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 29 Mar 2021 09:59:02 +0200 Subject: [PATCH 0859/1003] add issue67 --- .../reasoner/vlog/issues/Issue67.java | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java new file mode 100644 index 000000000..8b72c7cbb --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java @@ -0,0 +1,59 @@ +package org.semanticweb.rulewerk.reasoner.vlog.issues; + +import static org.junit.Assert.assertTrue; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class Issue67 { + + @Test + public void part01() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "B1_(a, b, c, d, prov1) ."); + RuleParser.parseInto(kb, "B2_(a, a, c, prov2) . "); + RuleParser.parseInto(kb, "H1_(a, n1_2_0, n1_2_0, n1_3_0, n1_4_0) ."); + RuleParser.parseInto(kb, "H2_(n1_3_0, n1_5_0, n1_6_0) ."); + RuleParser.parseInto(kb, "true(?x1) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_1), B2_(?x1, ?x1, ?y1, ?F_2), H1_(?x1, ?z1, ?z1, ?z2, ?F_3), H2_(?z2, ?z3, ?F_4) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + Fact query = RuleParser.parseFact("true(a)."); + + Set inferences = reasoner.getInferences().collect(Collectors.toSet()); + assertTrue(inferences.contains(query)); + reasoner.close(); + } + +} From f4572bc1ba7f73dc7c55f359a60b12f4fe6828c3 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 29 Mar 2021 10:18:38 +0200 Subject: [PATCH 0860/1003] ordering; add issue 63; add @ignore --- .../reasoner/vlog/issues/Issue61.java | 17 ++++--- .../reasoner/vlog/issues/Issue63.java | 48 +++++++++++++++++++ .../reasoner/vlog/issues/Issue67.java | 9 ++-- 3 files changed, 64 insertions(+), 10 deletions(-) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java index d192b46a1..6165fa13a 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java @@ -24,6 +24,7 @@ import java.io.IOException; +import org.junit.Ignore; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; @@ -34,6 +35,7 @@ public class Issue61 { + @Ignore @Test public void part01() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); @@ -41,16 +43,17 @@ public void part01() throws ParsingException, IOException { RuleParser.parseInto(kb, "p(a)."); RuleParser.parseInto(kb, "q(?X,!Y,!Z) :- p(?X) ."); RuleParser.parseInto(kb, "q(?X,!Y,!Y) :- p(?X) ."); - + Reasoner reasoner = new VLogReasoner(kb); reasoner.reason(); - + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - - assertEquals(2,reasoner.countQueryAnswers(query,true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + reasoner.close(); } + @Ignore @Test public void part02() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); @@ -61,10 +64,10 @@ public void part02() throws ParsingException, IOException { Reasoner reasoner = new VLogReasoner(kb); reasoner.reason(); - + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - - assertEquals(1,reasoner.countQueryAnswers(query,true).getCount()); + assertEquals(1, 
reasoner.countQueryAnswers(query, true).getCount()); + reasoner.close(); } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java new file mode 100644 index 000000000..1f09ecd89 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java @@ -0,0 +1,48 @@ +package org.semanticweb.rulewerk.reasoner.vlog.issues; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class Issue63 { + + @Test(expected = RulewerkRuntimeException.class) + public void test() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "p(a)."); + RuleParser.parseInto(kb, "q(?X) :- ~p(?X) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + reasoner.close(); + } + +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java index 8b72c7cbb..57b0a9c04 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java @@ -26,6 +26,7 @@ import java.util.Set; import java.util.stream.Collectors; +import org.junit.Ignore; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; @@ -36,6 +37,7 @@ public class Issue67 { + @Ignore @Test public void part01() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); @@ -44,15 +46,16 @@ public void part01() throws ParsingException, IOException { RuleParser.parseInto(kb, "B2_(a, a, c, prov2) . 
"); RuleParser.parseInto(kb, "H1_(a, n1_2_0, n1_2_0, n1_3_0, n1_4_0) ."); RuleParser.parseInto(kb, "H2_(n1_3_0, n1_5_0, n1_6_0) ."); - RuleParser.parseInto(kb, "true(?x1) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_1), B2_(?x1, ?x1, ?y1, ?F_2), H1_(?x1, ?z1, ?z1, ?z2, ?F_3), H2_(?z2, ?z3, ?F_4) ."); + RuleParser.parseInto(kb, + "true(?x1) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_1), B2_(?x1, ?x1, ?y1, ?F_2), H1_(?x1, ?z1, ?z1, ?z2, ?F_3), H2_(?z2, ?z3, ?F_4) ."); Reasoner reasoner = new VLogReasoner(kb); reasoner.reason(); + Set inferences = reasoner.getInferences().collect(Collectors.toSet()); Fact query = RuleParser.parseFact("true(a)."); - - Set inferences = reasoner.getInferences().collect(Collectors.toSet()); assertTrue(inferences.contains(query)); + reasoner.close(); } From 0af7a662e43532285a6ba3f5a049a1c131b0f00d Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 29 Mar 2021 10:21:57 +0200 Subject: [PATCH 0861/1003] detele extra spaces --- rulewerk-vlog/pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 54c6a8094..798211aea 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -43,8 +43,8 @@ development - From d55fbebbd2ac48d371f9334aabb00910d66a65b3 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Tue, 30 Mar 2021 13:07:21 +0200 Subject: [PATCH 0862/1003] add integrationtests module; use autoformating --- coverage/pom.xml | 85 +++++++++++++++++++++++++----------------------- pom.xml | 31 ++++++++++-------- 2 files changed, 62 insertions(+), 54 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index cd9c75339..83f678c46 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -14,46 +14,51 @@ coverage - - ${project.groupId} - rulewerk-core - ${project.version} - - - ${project.groupId} - rulewerk-vlog - ${project.version} - - - ${project.groupId} - rulewerk-rdf - ${project.version} - - - ${project.groupId} - rulewerk-owlapi - ${project.version} - - - ${project.groupId} - rulewerk-graal - ${project.version} - - - ${project.groupId} - rulewerk-parser - ${project.version} - - - ${project.groupId} - rulewerk-commands - ${project.version} - - - ${project.groupId} - rulewerk-client - ${project.version} - + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + ${project.groupId} + rulewerk-rdf + ${project.version} + + + ${project.groupId} + rulewerk-owlapi + ${project.version} + + + ${project.groupId} + rulewerk-graal + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + ${project.groupId} + rulewerk-commands + ${project.version} + + + ${project.groupId} + rulewerk-client + ${project.version} + + + ${project.groupId} + rulewerk-integrationtests + ${project.version} + diff --git a/pom.xml b/pom.xml index 1f9d8b879..08b77fc58 100644 --- a/pom.xml +++ b/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -13,7 +15,7 @@ https://github.com/knowsys/rulewerk - rulewerk-core rulewerk-vlog @@ -24,8 +26,9 @@ rulewerk-commands rulewerk-examples rulewerk-client + rulewerk-integrationtests coverage - + @@ -131,7 +134,7 @@ org.codehaus.mojo license-maven-plugin 1.14 - + first @@ -155,7 +158,7 @@ - org.eclipse.m2e lifecycle-mapping @@ -174,7 +177,7 @@ - + @@ -189,7 +192,7 @@ - + @@ -231,7 +234,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -294,13 +297,13 @@ test - ${project.reporting.outputDirectory}/jacoco-ut - + **/javacc/JavaCCParser* **/javacc/JavaCCParserConstants* 
**/javacc/JavaCCParserTokenManager* @@ -315,7 +318,7 @@ - **/javacc/JavaCCParser* **/javacc/JavaCCParserConstants* @@ -329,7 +332,7 @@ - org.apache.maven.plugins maven-javadoc-plugin From 8426b5224eb1617e1793542fd1fed81e1f8d11a2 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Tue, 30 Mar 2021 13:07:54 +0200 Subject: [PATCH 0863/1003] add vlog issues --- rulewerk-integrationtests/LICENSE.txt | 201 ++++++++++++++++++ rulewerk-integrationtests/pom.xml | 53 +++++ .../vlogissues/VLogIssue.java | 27 +++ .../vlogissues/VLogIssue61.java | 66 ++++++ .../vlogissues/VLogIssue63.java | 45 ++++ .../vlogissues/VLogIssue67.java | 51 +++++ .../vlogissues/VLogIssue69.java | 54 +++++ .../src/test/resources/vlogissues/61-1.rls | 3 + .../src/test/resources/vlogissues/61-2.rls | 3 + .../src/test/resources/vlogissues/63.rls | 3 + .../src/test/resources/vlogissues/67.rls | 5 + .../src/test/resources/vlogissues/69.rls | 6 + 12 files changed, 517 insertions(+) create mode 100644 rulewerk-integrationtests/LICENSE.txt create mode 100644 rulewerk-integrationtests/pom.xml create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/63.rls create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/67.rls create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/69.rls diff --git a/rulewerk-integrationtests/LICENSE.txt b/rulewerk-integrationtests/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-integrationtests/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
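The test classes and `.rls` rule files added below all follow the same load–reason–query pattern against the VLog backend. For orientation, here is a minimal self-contained sketch of that pattern, assuming only Rulewerk API calls that appear in these sources (`RuleParser.parse`, `RuleParser.parsePositiveLiteral`, `VLogReasoner`, `Reasoner.reason`, `countQueryAnswers`); the class name `VLogIssueSketch` and the inline rule string (mirroring `61-2.rls`) are illustrative only and not part of any commit:

```
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.parser.ParsingException;
import org.semanticweb.rulewerk.parser.RuleParser;
import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;

/** Illustrative stand-alone version of the pattern used by the VLogIssue* tests below. */
public class VLogIssueSketch {

	public static void main(final String[] args) throws ParsingException, IOException {
		// Rules in the syntax of 61-2.rls; '!' marks existentially quantified variables.
		final String rules = "p(a) .\n" + "q(?X,!Y,!Y) :- p(?X) .\n" + "q(?X,!Y,!Z) :- p(?X) .\n";

		// Parse the rules into a knowledge base, as the tests do with their .rls resources.
		final KnowledgeBase kb = RuleParser.parse(
				new ByteArrayInputStream(rules.getBytes(StandardCharsets.UTF_8)));

		// Materialise the knowledge base and count the answers to a query;
		// the VLogIssue61 test below expects exactly 1 answer for this rule set.
		try (final Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.reason();
			final PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)");
			System.out.println(reasoner.countQueryAnswers(query, true).getCount());
		}
	}
}
```

A later refactoring commit further below wraps exactly these steps behind `parseKbFromResource` and `getReasonerWithKbFromResource`, so the individual test classes only deal with resource names and assertions.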
diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml new file mode 100644 index 000000000..5bc396a97 --- /dev/null +++ b/rulewerk-integrationtests/pom.xml @@ -0,0 +1,53 @@ + + + 4.0.0 + + org.semanticweb.rulewerk + rulewerk-parent + 0.8.0-SNAPSHOT + + + rulewerk-integrationtests + jar + + Rulewerk Integration Tests + Contains blackbox tests for VLog + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + + + + + + + + org.codehaus.mojo + cobertura-maven-plugin + 2.7 + + true + true + + + + + + \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java new file mode 100644 index 000000000..93f0cc4b9 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java @@ -0,0 +1,27 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +class VLogIssue { + + final String RESOURCES = "src/test/resources/vlogissues/";; + +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java new file mode 100644 index 000000000..c99f081ff --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java @@ -0,0 +1,66 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.FileInputStream; +import java.io.IOException; + +import org.junit.Ignore; +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class VLogIssue61 extends VLogIssue{ + + @Ignore + @Test + public void test01() throws ParsingException, IOException { + KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "61-1.rls")); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + + reasoner.close(); + } + + @Test + public void test02() throws ParsingException, IOException { + KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "61-2.rls")); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + + reasoner.close(); + } + +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java new file mode 100644 index 000000000..422b3d153 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java @@ -0,0 +1,45 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.FileInputStream; +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class VLogIssue63 extends VLogIssue { + + @Test(expected = RulewerkRuntimeException.class) + public void test() throws ParsingException, IOException { + KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "63.rls")); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + reasoner.close(); + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java new file mode 100644 index 000000000..73377a732 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java @@ -0,0 +1,51 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.FileInputStream; +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class VLogIssue67 extends VLogIssue{ + + @Test + public void test() throws ParsingException, IOException { + KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "67.rls")); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("true(a)"); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + + reasoner.close(); + } + +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java new file mode 100644 index 000000000..c1c8bd17f --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java @@ -0,0 +1,54 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.FileInputStream; +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class VLogIssue69 extends VLogIssue{ + + @Test + public void test() throws ParsingException, IOException { + KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "69.rls")); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query1 = RuleParser.parsePositiveLiteral("prec(?X,?Y)"); + assertEquals(2, reasoner.countQueryAnswers(query1, true).getCount()); + + PositiveLiteral query2 = RuleParser.parsePositiveLiteral("conc(?X,?Y)"); + assertEquals(4, reasoner.countQueryAnswers(query2, true).getCount()); + + reasoner.close(); + } + +} diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls new file mode 100644 index 000000000..98e034606 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls @@ -0,0 +1,3 @@ +p(a) . +q(?X,!Y,!Z) :- p(?X) . 
+q(?X,!Y,!Y) :- p(?X) . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls new file mode 100644 index 000000000..b722059d7 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls @@ -0,0 +1,3 @@ +p(a) . +q(?X,!Y,!Y) :- p(?X) . +q(?X,!Y,!Z) :- p(?X) . diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/63.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/63.rls new file mode 100644 index 000000000..81832fc8a --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/63.rls @@ -0,0 +1,3 @@ +%https://github.com/karmaresearch/vlog/issues/61 +p(a). +q(?X):-~p(?X). \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/67.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/67.rls new file mode 100644 index 000000000..a0854c7c1 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/67.rls @@ -0,0 +1,5 @@ +B1_(a, b, c, d, prov1) . +B2_(a, a, c, prov2). +H1_(a, n1_2_0, n1_2_0, n1_3_0, n1_4_0) . +H2_(n1_3_0, n1_5_0, n1_6_0) . +true(?x1) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_1), B2_(?x1, ?x1, ?y1, ?F_2), H1_(?x1, ?z1, ?z1, ?z2, ?F_3), H2_(?z2, ?z3, ?F_4) . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/69.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/69.rls new file mode 100644 index 000000000..9bb347613 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/69.rls @@ -0,0 +1,6 @@ +B1_(a, b, c, d, prov1) . +B2_(a, a, c, prov2) . + +H1_(?x1, !z1, !z1, !z2, !F_2), H2_(!z2, !z3, !F_3), H3_(?x2, !F_4), H4_(!z1, !F_5) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_0), B2_(?x1, ?x1, ?y1, ?F_1) . +inst(?x1, ?x2, ?y1, ?y2, !F_6, rule0) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_0), B2_(?x1, ?x1, ?y1, ?F_1) . +prec(?F_0, ?F_6), prec(?F_1, ?F_6), conc(?F_6, ?F_2), conc(?F_6, ?F_3), conc(?F_6, ?F_4), conc(?F_6, ?F_5) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_0), B2_(?x1, ?x1, ?y1, ?F_1), H1_(?x1, ?z1, ?z1, ?z2, ?F_2), H2_(?z2, ?z3, ?F_3), H3_(?x2, ?F_4), H4_(?z1, ?F_5), inst(?x1, ?x2, ?y1, ?y2, ?F_6, rule0) . 
\ No newline at end of file From f08f095ba36ba3197ee5cbb1210ea867ea1d55a1 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Tue, 30 Mar 2021 13:14:18 +0200 Subject: [PATCH 0864/1003] remove build section --- rulewerk-integrationtests/pom.xml | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml index 5bc396a97..83e940746 100644 --- a/rulewerk-integrationtests/pom.xml +++ b/rulewerk-integrationtests/pom.xml @@ -34,20 +34,4 @@ - - - - - - org.codehaus.mojo - cobertura-maven-plugin - 2.7 - - true - true - - - - - \ No newline at end of file From f6bc3a01a0a385d645de1141b5b43f09f075335b Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 9 Apr 2021 09:55:17 +0200 Subject: [PATCH 0865/1003] make VLogIssue class abstract; delete extra semicolon --- .../rulewerk/integrationtests/vlogissues/VLogIssue.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java index 93f0cc4b9..b1153ea19 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java @@ -20,8 +20,8 @@ * #L% */ -class VLogIssue { +abstract class VLogIssue { - final String RESOURCES = "src/test/resources/vlogissues/";; + final String RESOURCES = "src/test/resources/vlogissues/"; } From b75f783ca7e3ec01f5540b9350b19f6d44a02abf Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 20:34:26 +0200 Subject: [PATCH 0866/1003] Refactor integration tests - Use maven-failsafe-plugin to run the integration tests - Exclude integration tests from code coverage, so that coverage reports are generated even when integration tests fail - Provide a convenient wrapper for creating VLogReasoner instances from RLS files --- coverage/pom.xml | 11 +- pom.xml | 34 +- rulewerk-integrationtests/pom.xml | 4 +- .../integrationtests/IntegrationTest.java | 77 +++ .../vlogissues/RulewerkIssue175IT.java | 35 ++ .../vlogissues/VLogIssue.java | 44 +- .../vlogissues/VLogIssue61.java | 66 --- .../{VLogIssue63.java => VLogIssue61IT.java} | 35 +- .../vlogissues/VLogIssue63IT.java | 19 + .../{VLogIssue67.java => VLogIssue67IT.java} | 23 +- .../vlogissues/VLogIssue69.java | 27 +- .../vlogissues/rulewerk/175-minimal.rls | 4 + .../resources/vlogissues/rulewerk/175.rls | 464 ++++++++++++++++++ .../resources/vlogissues/{ => vlog}/61-1.rls | 0 .../resources/vlogissues/{ => vlog}/61-2.rls | 0 .../resources/vlogissues/{ => vlog}/63.rls | 0 .../resources/vlogissues/{ => vlog}/67.rls | 0 .../resources/vlogissues/{ => vlog}/69.rls | 0 18 files changed, 691 insertions(+), 152 deletions(-) create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java delete mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java rename rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/{VLogIssue63.java => VLogIssue61IT.java} (53%) create mode 100644 
rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java rename rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/{VLogIssue67.java => VLogIssue67IT.java} (68%) create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls rename rulewerk-integrationtests/src/test/resources/vlogissues/{ => vlog}/61-1.rls (100%) rename rulewerk-integrationtests/src/test/resources/vlogissues/{ => vlog}/61-2.rls (100%) rename rulewerk-integrationtests/src/test/resources/vlogissues/{ => vlog}/63.rls (100%) rename rulewerk-integrationtests/src/test/resources/vlogissues/{ => vlog}/67.rls (100%) rename rulewerk-integrationtests/src/test/resources/vlogissues/{ => vlog}/69.rls (100%) diff --git a/coverage/pom.xml b/coverage/pom.xml index 83f678c46..be85d779e 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -54,11 +54,12 @@ rulewerk-client ${project.version} - - ${project.groupId} - rulewerk-integrationtests - ${project.version} - + + + + + + diff --git a/pom.xml b/pom.xml index 08b77fc58..7ae152180 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ https://github.com/knowsys/rulewerk - rulewerk-core rulewerk-vlog @@ -79,7 +79,7 @@ UTF-8 - 4.13.1 + 4.13.2 2.28.2 1.7.28 3.9 @@ -158,7 +158,7 @@ - org.eclipse.m2e lifecycle-mapping @@ -216,7 +216,6 @@ - @@ -234,7 +233,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -250,13 +249,26 @@ org.apache.maven.plugins maven-surefire-plugin - 3.0.0-M4 + 3.0.0-M5 ${surefireArgLine} 1 true + + org.apache.maven.plugins + maven-failsafe-plugin + 3.0.0-M5 + + + + integration-test + verify + + + + org.eluder.coveralls @@ -297,12 +309,12 @@ test - ${project.reporting.outputDirectory}/jacoco-ut - **/javacc/JavaCCParser* **/javacc/JavaCCParserConstants* @@ -318,7 +330,7 @@ - **/javacc/JavaCCParser* **/javacc/JavaCCParserConstants* @@ -332,7 +344,7 @@ - org.apache.maven.plugins maven-javadoc-plugin diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml index 83e940746..91bd919e3 100644 --- a/rulewerk-integrationtests/pom.xml +++ b/rulewerk-integrationtests/pom.xml @@ -31,7 +31,5 @@ rulewerk-vlog ${project.version} - - - \ No newline at end of file + diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java new file mode 100644 index 000000000..3bf14ab0b --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java @@ -0,0 +1,77 @@ +package org.semanticweb.rulewerk.integrationtests; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.InputStream; + +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +public abstract class IntegrationTest { + /** + * Returns the prefix to use for resource names + * + * @return the prefix to use when turning resource names into paths + * + * This needs to be overriden in subpackages for loading to work + * correctly. + */ + protected String getResourcePrefix() { + return "/"; + } + + /** + * Obtain an input stream for a resource name + * + * @param resourceName the resource name to load + * @return an {@link InputStream} pointing to the resource + */ + protected InputStream getResourceAsStream(String resourceName) { + String prefix = getResourcePrefix(); + + if (resourceName.startsWith(prefix)) { + prefix = ""; + } else if (resourceName.startsWith("/") && prefix.endsWith("/")) { + prefix = prefix.substring(0, prefix.length() - 1); + } + + return this.getClass().getResourceAsStream(prefix + resourceName); + } + + /** + * Load a Knowledge Base from a resource name + * + * @param resourceName the name of the resource to parse into a Knowledge Base + * + * @throws ParsingException when there is an error during parsing + * + * @return a {@link KnowledgeBase} containing the parsed contents of the named + * resource + */ + protected KnowledgeBase parseKbFromResource(String resourceName) throws ParsingException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, getResourceAsStream(resourceName)); + + return kb; + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java new file mode 100644 index 000000000..b6e38d056 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java @@ -0,0 +1,35 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class RulewerkIssue175IT extends VLogIssue { + @Test + public void issue175_full_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("rulewerk/175.rls")) { + reasoner.reason(); + try (QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral("VANDALISMRESERVEDENTITIESSUPPREL0", + Expressions.makeAbstractConstant("VANDALISMRESERVEDENTITIESSUPPRULE50")), false)) { + assertTrue(result.hasNext()); + } + } + } + + @Test + public void issue175_minimal_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("rulewerk/175-minimal.rls")) { + reasoner.reason(); + try (QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral("VANDALISMRESERVEDENTITIESSUPPREL0", + Expressions.makeAbstractConstant("VANDALISMRESERVEDENTITIESSUPPRULE50")), false)) { + assertTrue(result.hasNext()); + } + } + } +} diff --git 
a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java index b1153ea19..d46941a7e 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java @@ -1,27 +1,27 @@ package org.semanticweb.rulewerk.integrationtests.vlogissues; -/*- - * #%L - * Rulewerk Integration Tests - * %% - * Copyright (C) 2018 - 2021 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.integrationtests.IntegrationTest; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -abstract class VLogIssue { - - final String RESOURCES = "src/test/resources/vlogissues/"; +abstract class VLogIssue extends IntegrationTest { + @Override + protected String getResourcePrefix() { + return "/vlogissues/"; + } + /** + * Obtain a reasoner loaded with the Knowledge Base read from the resource name + * + * @param resourceName the name of the resource to load into the Reasoner + * + * @throws ParsingException when there is an error during parsing + * + * @return a {@link VLogReasoner} containing the parsed contents of the named + * resource + */ + protected Reasoner getReasonerWithKbFromResource(String resourceName) throws ParsingException { + return new VLogReasoner(parseKbFromResource(resourceName)); + } } diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java deleted file mode 100644 index c99f081ff..000000000 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java +++ /dev/null @@ -1,66 +0,0 @@ -package org.semanticweb.rulewerk.integrationtests.vlogissues; - -/*- - * #%L - * Rulewerk Integration Tests - * %% - * Copyright (C) 2018 - 2021 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertEquals; - -import java.io.FileInputStream; -import java.io.IOException; - -import org.junit.Ignore; -import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; - -public class VLogIssue61 extends VLogIssue{ - - @Ignore - @Test - public void test01() throws ParsingException, IOException { - KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "61-1.rls")); - - Reasoner reasoner = new VLogReasoner(kb); - reasoner.reason(); - - PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); - - reasoner.close(); - } - - @Test - public void test02() throws ParsingException, IOException { - KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "61-2.rls")); - - Reasoner reasoner = new VLogReasoner(kb); - reasoner.reason(); - - PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); - - reasoner.close(); - } - -} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java similarity index 53% rename from rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java rename to rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java index 422b3d153..960e02279 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,26 +20,35 @@ * #L% */ -import java.io.FileInputStream; +import static org.junit.Assert.assertEquals; + import java.io.IOException; import org.junit.Test; -import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -public class VLogIssue63 extends VLogIssue { +public class VLogIssue61IT extends VLogIssue { + + @Test + public void ruleset01_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/61-1.rls")) { + reasoner.reason(); - @Test(expected = RulewerkRuntimeException.class) - public void test() throws ParsingException, IOException { - KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "63.rls")); + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + } + } - Reasoner reasoner = new VLogReasoner(kb); - reasoner.reason(); + @Test + public void ruleset02_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/61-2.rls")) { + reasoner.reason(); - reasoner.close(); + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + } } } diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java new file mode 100644 index 000000000..9412c78d4 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java @@ -0,0 +1,19 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +import java.io.IOException; + +import org.junit.Ignore; +import org.junit.Test; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class VLogIssue63IT extends VLogIssue { + + @Ignore + @Test + public void test() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/63.rls")) { + reasoner.reason(); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67IT.java similarity index 68% rename from rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java rename to rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67IT.java index 73377a732..ed9daa56f 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67IT.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,30 +22,23 @@ import static org.junit.Assert.assertEquals; -import java.io.FileInputStream; import java.io.IOException; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -public class VLogIssue67 extends VLogIssue{ +public class VLogIssue67IT extends VLogIssue { @Test public void test() throws ParsingException, IOException { - KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "67.rls")); + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/67.rls")) { + reasoner.reason(); - Reasoner reasoner = new VLogReasoner(kb); - reasoner.reason(); - - PositiveLiteral query = RuleParser.parsePositiveLiteral("true(a)"); - assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); - - reasoner.close(); + PositiveLiteral query = RuleParser.parsePositiveLiteral("true(a)"); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + } } - } diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java index c1c8bd17f..73f7cddd8 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,33 +22,26 @@ import static org.junit.Assert.assertEquals; -import java.io.FileInputStream; import java.io.IOException; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -public class VLogIssue69 extends VLogIssue{ +public class VLogIssue69 extends VLogIssue { @Test public void test() throws ParsingException, IOException { - KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "69.rls")); + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/69.rls")) { + reasoner.reason(); - Reasoner reasoner = new VLogReasoner(kb); - reasoner.reason(); + PositiveLiteral query1 = RuleParser.parsePositiveLiteral("prec(?X,?Y)"); + assertEquals(2, reasoner.countQueryAnswers(query1, true).getCount()); - PositiveLiteral query1 = RuleParser.parsePositiveLiteral("prec(?X,?Y)"); - assertEquals(2, reasoner.countQueryAnswers(query1, true).getCount()); - - PositiveLiteral query2 = RuleParser.parsePositiveLiteral("conc(?X,?Y)"); - assertEquals(4, reasoner.countQueryAnswers(query2, true).getCount()); - - reasoner.close(); + PositiveLiteral query2 = RuleParser.parsePositiveLiteral("conc(?X,?Y)"); + assertEquals(4, reasoner.countQueryAnswers(query2, true).getCount()); + } } - } diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls new file mode 100644 index 000000000..0013de9db --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls @@ -0,0 +1,4 @@ +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE50) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls new file mode 100644 index 000000000..a78e9911e --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls @@ -0,0 +1,464 @@ +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) .
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom0(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom0(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS310) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS311, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE155) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE6) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE189) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE50) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE58) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE2) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE35) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE44) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE176) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE53) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE54) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE55) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE56) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE159) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE14) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE110) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE113) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE21) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE22) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE89) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE23) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE26) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE28) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE67) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE64) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE70) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE170) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE127) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE154) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE174) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE38) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE156) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE74) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE133) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE82) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE85) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE150) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE181) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE175) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE81) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE1) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE90) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE184) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE43) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE92) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE19) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE0) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE188) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE129) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE9) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE96) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE46) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE190) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE97) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE3) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE191) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE99) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE100) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE49) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE4) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0, VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE5) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE193) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS196), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE194) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS196), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE421) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE422) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS310), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE423) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS310), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE424) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS311, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS311, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE347) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS273), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE348) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS273), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE353) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE354) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS276), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS276), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE287) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS243), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS243), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE288) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS279), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE359) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS279), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE360) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE361) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE362) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS280), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS280), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS281), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE363) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS281), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE364) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE439) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS319), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS319), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE440) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE445) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS322), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS322), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE446) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE443) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS321), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS321), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE444) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE211) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS205), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE212) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS205), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE215) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS207), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS207), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE216) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS208), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE217) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS208), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE218) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE297) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE298) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS248), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS248), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE295) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS247), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS247), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE296) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS327), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE455) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS327), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE456) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE377) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE378) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS288), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS288), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE305) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS252), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS252), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE306) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS215), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE231) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS215), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE232) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE233) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE234) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS216), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS216), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE237) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS218), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS218), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE238) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS256), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE313) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS256), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE314) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE321) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS260), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS260), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE322) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE391) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS295), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE392) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS295), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE323) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS261), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE324) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS261), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE397) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS298), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE398) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS298), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE477) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE478) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS338), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS338), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE401) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS300), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS300), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE402) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE255) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS227), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS227), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE256) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE253) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE254) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS226), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS226), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS341), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE483) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS341), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE484) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS304), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE409) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS304), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE410) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE489) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS344), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS344), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE490) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE333) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS266), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS266), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE334) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE261) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE262) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS230), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS230), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE265) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS232), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS232), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE266) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE335) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE336) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS267), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS267), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE273) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS236), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE274) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS236), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . 
+VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0, VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-1.rls similarity index 100% rename from rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-1.rls diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-2.rls similarity index 100% rename from rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-2.rls diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/63.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/63.rls similarity index 100% rename from rulewerk-integrationtests/src/test/resources/vlogissues/63.rls rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/63.rls diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/67.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/67.rls similarity index 100% rename from rulewerk-integrationtests/src/test/resources/vlogissues/67.rls rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/67.rls diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/69.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/69.rls similarity index 100% rename from rulewerk-integrationtests/src/test/resources/vlogissues/69.rls rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/69.rls From 36a77732a5c74cdcaaacbb9a40f74c4dd683ea86 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 20:49:39 +0200 Subject: [PATCH 0867/1003] Bump VLog & Trident for CI --- vlog/trident.nix | 6 +++--- vlog/vlog.nix | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/vlog/trident.nix b/vlog/trident.nix index c7c222e51..df418aa77 100644 --- a/vlog/trident.nix +++ b/vlog/trident.nix @@ -1,10 +1,10 @@ { pkgs, lz4, git, zlib, cmake, cacert, sparsehash, kognac, ... 
}: pkgs.stdenv.mkDerivation { - name = "trident-unstable-2021-02-05"; + name = "trident-unstable-2021-04-01"; src = pkgs.fetchgit { url = "git://github.com/karmaresearch/trident"; - rev = "53630ea83460b5e78851b753f245efaefbcaa57f"; - sha256 = "1irjdzjxzwakgalliry23vcl5iqf0w5bm82wra91mlyqmgirnk2x"; + rev = "087e90509434f84e927251c0aa8f1dd91dbb64c7"; + sha256 = "01qw93b0hvvr7vgk24d550mvrcj7lb5chrkh0y68x1mr01di2a87"; }; buildInputs = [ zlib sparsehash lz4 ]; diff --git a/vlog/vlog.nix b/vlog/vlog.nix index 1dd186c47..e74eea379 100644 --- a/vlog/vlog.nix +++ b/vlog/vlog.nix @@ -3,8 +3,8 @@ pkgs.stdenv.mkDerivation { name = "vlog"; src = pkgs.fetchgit { url = "git://github.com/karmaresearch/vlog"; - rev = "c20fa48fc284b333ce03e63ca3ad97dc51701542"; - sha256 = "0y1zv4bwb84rv09ihc8jc11hxxffrspk8v01s28cv2nymg2306q4"; + rev = "7356ed98db064ee30300950441716545b819f3a1"; + sha256 = "127jykvgvikyv8nw4ih73qs6cin6ck5bfc0p53svv7hh9zn7vaj2"; }; buildInputs = [ kognac trident sparsehash jdk curl lz4 ]; From c5239878b7aa90bb5bba44d2c833c01902ab7a5a Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:03:16 +0200 Subject: [PATCH 0868/1003] Update license headers --- .../vlogissues/RulewerkIssue175IT.java | 20 ++++++++++++++++++ .../vlogissues/VLogIssue.java | 20 ++++++++++++++++++ .../vlogissues/VLogIssue63IT.java | 21 ++++++++++++++++++- 3 files changed, 60 insertions(+), 1 deletion(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java index b6e38d056..cda83279e 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.integrationtests.vlogissues; +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import static org.junit.Assert.assertTrue; import java.io.IOException; diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java index d46941a7e..7e304969b 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.integrationtests.vlogissues; +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.integrationtests.IntegrationTest; import org.semanticweb.rulewerk.parser.ParsingException; diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java index 9412c78d4..8016ed069 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.integrationtests.vlogissues; +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.IOException; import org.junit.Ignore; @@ -9,7 +29,6 @@ public class VLogIssue63IT extends VLogIssue { - @Ignore @Test public void test() throws ParsingException, IOException { try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/63.rls")) { From ef91059abac7780ba9fd52b71c86232d99cb9a52 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:03:22 +0200 Subject: [PATCH 0869/1003] Fix typo in nix shell hook --- shell.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/shell.nix b/shell.nix index 959415135..9d8842c07 100644 --- a/shell.nix +++ b/shell.nix @@ -5,7 +5,7 @@ let pkgs = import {}; deps = dependencies.deps // { maven = maven; }; in pkgs.mkShell { buildInputsNative = [ maven deps.jdk dependencies.vlog ]; - shellHook = ''' + shellHook = '' mkdir -p rulewerk-vlog/lib/ ln -sf ${dependencies.vlog.dev}/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar mvn --no-transfer-progress initialize -Pdevelopment From c3c6061acbaee27daefbf18eaa5c50fb2093344f Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:05:40 +0200 Subject: [PATCH 0870/1003] Fix test case for VLog issue #63 --- .../rulewerk/integrationtests/vlogissues/VLogIssue63IT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java index 8016ed069..f289764b4 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java @@ -22,14 +22,14 @@ import java.io.IOException; -import org.junit.Ignore; import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; public class VLogIssue63IT extends VLogIssue { - @Test + @Test(expected = RulewerkRuntimeException.class) public void test() throws ParsingException, IOException { try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/63.rls")) { reasoner.reason(); From 99f64ebb38c7495c9e77d04bcaef98c76b5de207 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:06:38 +0200 Subject: [PATCH 0871/1003] Fix test case for VLog issue #61 --- .../rulewerk/integrationtests/vlogissues/VLogIssue61IT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java index 960e02279..a79b34c4a 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java @@ -48,7 +48,7 @@ public void ruleset02_succeeds() throws ParsingException, IOException { reasoner.reason(); PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); } } } From 1138d03d8290c61a15c153efd761b90d96b6a87b Mon 
Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:09:38 +0200 Subject: [PATCH 0872/1003] Add workflow for integration tests --- .github/workflows/integration-tests.yml | 21 +++++++++++++++++++ .../workflows/{tests.yml => unit-tests.yml} | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/integration-tests.yml rename .github/workflows/{tests.yml => unit-tests.yml} (97%) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml new file mode 100644 index 000000000..7aaf7f98a --- /dev/null +++ b/.github/workflows/integration-tests.yml @@ -0,0 +1,21 @@ +name: "Rulewerk Integration Tests" +on: + pull_request: + push: +jobs: + tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.3.4 + - uses: cachix/install-nix-action@v12 + with: + nix_path: nixpkgs=channel:nixos-unstable + - uses: cachix/cachix-action@v8 + with: + name: knowsys + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + - env: + BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} + run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV + - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV + - run: nix-shell --run "mvn --no-transfer-progress failsafe:integration-test" diff --git a/.github/workflows/tests.yml b/.github/workflows/unit-tests.yml similarity index 97% rename from .github/workflows/tests.yml rename to .github/workflows/unit-tests.yml index a86878ee9..5cb3cc663 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/unit-tests.yml @@ -1,4 +1,4 @@ -name: "Tests" +name: "Rulewerk Unit Tests" on: pull_request: push: From f411cb8514c59698c38753d984576bf49365cb01 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:14:57 +0200 Subject: [PATCH 0873/1003] Run integration tests only after completion of unit tests --- .github/workflows/integration-tests.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 7aaf7f98a..4f34ae373 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -1,7 +1,9 @@ name: "Rulewerk Integration Tests" on: - pull_request: - push: + workflow_run: + workflows: ["Rulewerk Unit Tests"] + types: + - completed jobs: tests: runs-on: ubuntu-latest From 4a2d9ed1051b3f1250579bc8594f53eb82f5dddf Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:37:32 +0200 Subject: [PATCH 0874/1003] Update test badges in README --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index d7caf03e6..e8f6208d5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ Rulewerk ====== -[![Build Status](https://github.com/knowsys/rulewerk/workflows/Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Tests) +[![Build Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk+Unit+Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Unit+Tests) +[![Integration Test Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk+Integration+Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Integration+Tests) [![Coverage 
Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22) From 89ace929324f9b527c56082c352af54f91f82cae Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:38:55 +0200 Subject: [PATCH 0875/1003] Fix badges --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e8f6208d5..5f6b233a4 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ Rulewerk ====== -[![Build Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk+Unit+Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Unit+Tests) -[![Integration Test Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk+Integration+Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Integration+Tests) +[![Build Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk%20Unit%20Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Unit+Tests) +[![Integration Test Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk%20Integration%20Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Integration+Tests) [![Coverage Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22) From 5a792995129fdbcf9f3a7a6f8b5a5cddb0079180 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 17:38:25 +0200 Subject: [PATCH 0876/1003] Use vlog-1.35 for CI --- .envrc | 1 + vlog/kognac.nix | 3 ++- vlog/trident.nix | 7 ++++--- vlog/vlog.nix | 5 +++-- 4 files changed, 10 insertions(+), 6 deletions(-) create mode 100644 .envrc diff --git a/.envrc b/.envrc new file mode 100644 index 000000000..1d953f4bd --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use nix diff --git a/vlog/kognac.nix b/vlog/kognac.nix index b239aca66..9faa9400e 100644 --- a/vlog/kognac.nix +++ b/vlog/kognac.nix @@ -1,6 +1,7 @@ { pkgs, lz4, git, zlib, cmake, cacert, sparsehash, ...}: pkgs.stdenv.mkDerivation { - name = "kognac-unstable-2020-12-01"; + name = "kognac-unstable"; + version = "2020-12-01"; src = pkgs.fetchgit { url = "git://github.com/karmaresearch/kognac"; rev = "8430b081f8d76b11fa6858f3ec31a9ea5a5cf6a9"; diff --git a/vlog/trident.nix b/vlog/trident.nix index df418aa77..b6b0b8a1b 100644 --- a/vlog/trident.nix +++ b/vlog/trident.nix @@ -1,10 +1,11 @@ { pkgs, lz4, git, zlib, cmake, cacert, sparsehash, kognac, ... 
}: pkgs.stdenv.mkDerivation { - name = "trident-unstable-2021-04-01"; + name = "trident-unstable"; + version = "2021-05-18"; src = pkgs.fetchgit { url = "git://github.com/karmaresearch/trident"; - rev = "087e90509434f84e927251c0aa8f1dd91dbb64c7"; - sha256 = "01qw93b0hvvr7vgk24d550mvrcj7lb5chrkh0y68x1mr01di2a87"; + rev = "c24179a17fac7d3ec8214aff9b97b41b21e981b4"; + sha256 = "0bi0366ngk162xjll1cxys6hfynw2xksz1yr7l6hdsx0bx9qvrw4"; }; buildInputs = [ zlib sparsehash lz4 ]; diff --git a/vlog/vlog.nix b/vlog/vlog.nix index e74eea379..761784481 100644 --- a/vlog/vlog.nix +++ b/vlog/vlog.nix @@ -1,10 +1,11 @@ { pkgs, lz4, git, jdk, curl, zlib, cmake, cacert, sparsehash, kognac, trident, ... }: pkgs.stdenv.mkDerivation { name = "vlog"; + version = "1.35"; src = pkgs.fetchgit { url = "git://github.com/karmaresearch/vlog"; - rev = "7356ed98db064ee30300950441716545b819f3a1"; - sha256 = "127jykvgvikyv8nw4ih73qs6cin6ck5bfc0p53svv7hh9zn7vaj2"; + rev = "ca0669424963765d08a63a29a0d89e27cf33ef51"; + sha256 = "10xkc8qfarz3garn2x88p064mx109vqayiijk6zslhmn4r7j465k"; }; buildInputs = [ kognac trident sparsehash jdk curl lz4 ]; From 1a681108ffb78abcdc12fd423c630a013ac48ca6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 17:56:20 +0200 Subject: [PATCH 0877/1003] CI: properly fail workflow when integration tests fail --- .github/workflows/integration-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 4f34ae373..5feda97d4 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -20,4 +20,4 @@ jobs: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - - run: nix-shell --run "mvn --no-transfer-progress failsafe:integration-test" + - run: nix-shell --run "mvn --no-transfer-progress failsafe:verify" From c74d93d6056292df1f9e5a9f750b7e46139b80dc Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 18:13:17 +0200 Subject: [PATCH 0878/1003] CI: run propery verify phase for integration tests, skip unit tests --- .github/workflows/integration-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 5feda97d4..7f94af9dc 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -20,4 +20,4 @@ jobs: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - - run: nix-shell --run "mvn --no-transfer-progress failsafe:verify" + - run: nix-shell --run "mvn --no-transfer-progress -Dtest=!*Test -DfailIfNoTests=false verify" From f1205099b50006909b513848bf463d80151f7003 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 18:20:08 +0200 Subject: [PATCH 0879/1003] CI: just run "mvn verify" for integration tests --- .github/workflows/integration-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 7f94af9dc..09a4043ce 100644 --- a/.github/workflows/integration-tests.yml 
+++ b/.github/workflows/integration-tests.yml @@ -20,4 +20,4 @@ jobs: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - - run: nix-shell --run "mvn --no-transfer-progress -Dtest=!*Test -DfailIfNoTests=false verify" + - run: nix-shell --run "mvn --no-transfer-progress verify" From 17e9a1597b9172e61fee05f58eb863a56144748d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 18:22:12 +0200 Subject: [PATCH 0880/1003] Fix naming of integration test for vlog issue 69 --- .../vlogissues/{VLogIssue69.java => VLogIssue69IT.java} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/{VLogIssue69.java => VLogIssue69IT.java} (92%) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java similarity index 92% rename from rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java rename to rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java index 73f7cddd8..76c897cff 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java @@ -30,10 +30,10 @@ import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; -public class VLogIssue69 extends VLogIssue { +public class VLogIssue69IT extends VLogIssue { @Test - public void test() throws ParsingException, IOException { + public void ruleset_succeeds() throws ParsingException, IOException { try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/67.rls")) { reasoner.reason(); From f65b1f06283d3d892a62dc64b1f9a2ccf0552058 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 18:22:35 +0200 Subject: [PATCH 0881/1003] Fix integration test for vlog issue 69 --- .../rulewerk/integrationtests/vlogissues/VLogIssue69IT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java index 76c897cff..1ada7cc7e 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java @@ -34,7 +34,7 @@ public class VLogIssue69IT extends VLogIssue { @Test public void ruleset_succeeds() throws ParsingException, IOException { - try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/67.rls")) { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/69.rls")) { reasoner.reason(); PositiveLiteral query1 = RuleParser.parsePositiveLiteral("prec(?X,?Y)"); From 6edcd6e62ce46e88e51ff7e18215a5039d1a82a2 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 19:09:24 +0200 Subject: [PATCH 0882/1003] Fix 
integration test for VLog issue 61 --- .../vlogissues/VLogIssue61IT.java | 24 ++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java index a79b34c4a..c5f736ef5 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java @@ -20,25 +20,43 @@ * #L% */ -import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.io.IOException; +import java.util.List; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; public class VLogIssue61IT extends VLogIssue { + boolean hasCorrectAnswers(QueryResultIterator answers) { + int numAnswers = 0; + boolean hasEqualNullsAnswer = false; + + while (answers.hasNext()) { + ++numAnswers; + + List terms = answers.next().getTerms(); + hasEqualNullsAnswer = hasEqualNullsAnswer || (terms.get(1).equals(terms.get(2))); + } + + return hasEqualNullsAnswer && numAnswers <= 2; + } + @Test public void ruleset01_succeeds() throws ParsingException, IOException { try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/61-1.rls")) { reasoner.reason(); PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + assertTrue(hasCorrectAnswers(reasoner.answerQuery(query, true))); } } @@ -48,7 +66,7 @@ public void ruleset02_succeeds() throws ParsingException, IOException { reasoner.reason(); PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + assertTrue(hasCorrectAnswers(reasoner.answerQuery(query, true))); } } } From f1850727c9371e7b74b4abc98c903bcfa72cc4e6 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 23 May 2021 13:35:39 +0200 Subject: [PATCH 0883/1003] removed unused field variable --- .../rulewerk/parser/RuleParserTest.java | 357 +++++++++--------- 1 file changed, 179 insertions(+), 178 deletions(-) diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index 797d6c59c..0e62bdb79 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -19,8 +19,10 @@ * limitations under the License. 
* #L% */ -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; import java.util.ArrayList; import java.util.Arrays; @@ -54,213 +56,212 @@ public class RuleParserTest implements ParserTestUtils { private final Constant e = Expressions.makeAbstractConstant("https://example.org/e"); private final Constant abc = Expressions.makeDatatypeConstant("abc", PrefixDeclarationRegistry.XSD_STRING); private final Constant xyz = Expressions.makeDatatypeConstant("xyz", PrefixDeclarationRegistry.XSD_STRING); - private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); - private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", x, c); - private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); - private final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", x, y); - private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); - private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", c); - private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); - private final PositiveLiteral fact3 = Expressions.makePositiveLiteral("http://example.org/p", abc); - private final PositiveLiteral fact4 = Expressions.makePositiveLiteral("https://example.org/s", e); - private final PositiveLiteral fact5 = Expressions.makePositiveLiteral("q", xyz); - private final PositiveLiteral fact6 = Expressions.makePositiveLiteral("http://example.org/p", abc); - private final Conjunction body1 = Expressions.makeConjunction(atom1, atom2); - private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); - private final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); - private final Rule rule1 = Expressions.makeRule(head, body1); - private final Rule rule2 = Expressions.makeRule(head, body2); + private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", this.x, this.c); + private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", this.x, this.c); + private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", this.x, this.z); + private final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", this.x, this.y); + private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", this.x, this.d); + private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", this.c); + private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", this.abc); + private final PositiveLiteral fact3 = Expressions.makePositiveLiteral("http://example.org/p", this.abc); + private final PositiveLiteral fact4 = Expressions.makePositiveLiteral("https://example.org/s", this.e); + private final PositiveLiteral fact5 = Expressions.makePositiveLiteral("q", this.xyz); + private final Conjunction body1 = Expressions.makeConjunction(this.atom1, this.atom2); + private final Conjunction body2 = Expressions.makeConjunction(this.negAtom1, this.atom2); + private final Conjunction head = Expressions.makePositiveConjunction(this.atom3, this.atom4); + private final Rule rule1 = Expressions.makeRule(this.head, this.body1); + private final Rule rule2 = 
Expressions.makeRule(this.head, this.body2); @Test public void testExplicitIri() throws ParsingException { - String input = "() ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final String input = "() ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testPrefixResolution() throws ParsingException { - String input = "@prefix ex: . ex:s(ex:c) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final String input = "@prefix ex: . ex:s(ex:c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testBaseRelativeResolution() throws ParsingException { - String input = "@base . () ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final String input = "@base . () ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testBaseResolution() throws ParsingException { - String input = "@base . s(c) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final String input = "@base . s(c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testNoBaseRelativeIri() throws ParsingException { - String input = "s(c) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - PositiveLiteral atom = Expressions.makePositiveLiteral("s", Expressions.makeAbstractConstant("c")); + final String input = "s(c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + final PositiveLiteral atom = Expressions.makePositiveLiteral("s", Expressions.makeAbstractConstant("c")); assertEquals(Arrays.asList(atom), statements); } @Test(expected = ParsingException.class) public void testPrefixConflict() throws ParsingException { - String input = "@prefix ex: . @prefix ex: . s(c) ."; + final String input = "@prefix ex: . @prefix ex: . s(c) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testBaseConflict() throws ParsingException { - String input = "@base . @base . s(c) ."; + final String input = "@base . @base . s(c) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testMissingPrefix() throws ParsingException { - String input = "ex:s(c) ."; + final String input = "ex:s(c) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoUniversalLiterals() throws ParsingException { - String input = "p(?X) ."; + final String input = "p(?X) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoExistentialLiterals() throws ParsingException { - String input = "p(!X) ."; + final String input = "p(!X) ."; RuleParser.parse(input); } @Test public void testSimpleRule() throws ParsingException { - String input = "@base . " + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . 
"; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(rule1), statements); + final String input = "@base . " + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . "; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.rule1), statements); } @Test public void testFactWithCommentSymbol() throws ParsingException { - String input = "t(\"%test\") . "; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + final String input = "t(\"%test\") . "; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(Expressions.makeFact("t", Expressions.makeDatatypeConstant("%test", PrefixDeclarationRegistry.XSD_STRING))), statements); } @Test public void testNegationRule() throws ParsingException { - String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(rule2), statements); + final String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.rule2), statements); } @Test(expected = ParsingException.class) public void testUnsafeNegationRule() throws ParsingException { - String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?Y,c), p(?X,?Z) . "; + final String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?Y,c), p(?X,?Z) . "; RuleParser.parse(input); } @Test public void testWhiteSpace() throws ParsingException { - String input = "@base \n\n . " + final String input = "@base \n\n . " + " q(?X, !Y) , r(?X, d\t ) \n\n:- p(?X,c), p(?X,\n?Z) \n. 
"; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(rule1), statements); + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.rule1), statements); } @Test(expected = ParsingException.class) public void testNoUnsafeVariables() throws ParsingException { - String input = "p(?X,?Y) :- q(?X) ."; + final String input = "p(?X,?Y) :- q(?X) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoConflictingQuantificationVariables() throws ParsingException { - String input = "p(?X,!X) :- q(?X) ."; + final String input = "p(?X,!X) :- q(?X) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoBodyExistential() throws ParsingException { - String input = "p(?X) :- q(?X,!Y) ."; + final String input = "p(?X) :- q(?X,!Y) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoDollarVariables() throws ParsingException { - String input = "p($X) :- q($X) ."; + final String input = "p($X) :- q($X) ."; RuleParser.parse(input); } @Test public void testIntegerLiteral() throws ParsingException { - String input = "p(42)"; - PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + final String input = "p(42)"; + final PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @Test public void testAbbreviatedIntegerLiteral() throws ParsingException { - String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . " + "p(\"42\"^^xsd:integer) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + final String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . 
" + "p(\"42\"^^xsd:integer) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + final PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(Arrays.asList(integerLiteral), statements); } @Test public void testFullIntegerLiteral() throws ParsingException { - String input = "p(\"42\"^^<" + PrefixDeclarationRegistry.XSD_INTEGER + "> )"; - PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + final String input = "p(\"42\"^^<" + PrefixDeclarationRegistry.XSD_INTEGER + "> )"; + final PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @Test public void testDecimalLiteral() throws ParsingException { - String input = "p(-5.0)"; - PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", + final String input = "p(-5.0)"; + final PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("-5.0", PrefixDeclarationRegistry.XSD_DECIMAL)); assertEquals(decimalLiteral, RuleParser.parseLiteral(input)); } @Test public void testDoubleLiteral() throws ParsingException { - String input = "p(4.2E9)"; - PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", + final String input = "p(4.2E9)"; + final PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("4.2E9", PrefixDeclarationRegistry.XSD_DOUBLE)); assertEquals(doubleLiteral, RuleParser.parseLiteral(input)); } @Test public void testStringLiteral() throws ParsingException { - String input = "p(\"abc\")"; - assertEquals(fact2, RuleParser.parseLiteral(input)); + final String input = "p(\"abc\")"; + assertEquals(this.fact2, RuleParser.parseLiteral(input)); } @Test(expected = ParsingException.class) public void testIncompleteStringLiteral() throws ParsingException { - String input = "p(\"abc)"; + final String input = "p(\"abc)"; RuleParser.parseLiteral(input); } @Test public void parseLiteral_escapeSequences_succeeds() throws ParsingException { - String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @@ -268,159 +269,159 @@ public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { @Test public void parseLiteral_allEscapeSequences_succeeds() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") - String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; + final PositiveLiteral fact = 
Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_allEscapeSequences_roundTrips() throws ParsingException { - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @Test(expected = ParsingException.class) public void parseLiteral_invalidEscapeSequence_throws() throws ParsingException { - String input = "p(\"\\ÿ\")"; + final String input = "p(\"\\ÿ\")"; RuleParser.parseLiteral(input); } @Test(expected = ParsingException.class) public void parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingException { - String input = "p(\"\\\")"; + final String input = "p(\"\\\")"; RuleParser.parseLiteral(input); } @Test public void parseLiteral_multiLineLiteral_succeeds() throws ParsingException { - String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_multiLineLiteral_roundTrips() throws ParsingException { - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @Test(expected = ParsingException.class) public void testIncompleteStringLiteralMultiLine() throws ParsingException { - String input = "p('''abc\ndef'')"; + final String input = "p('''abc\ndef'')"; RuleParser.parseLiteral(input); } @Test public void testFullLiteral() throws ParsingException { - String input = "p(\"abc\"^^)"; - assertEquals(fact2, RuleParser.parseLiteral(input)); + final String input = "p(\"abc\"^^)"; + assertEquals(this.fact2, RuleParser.parseLiteral(input)); } @Test public void testUnicodeLiteral() throws ParsingException { - String input = "p(\"\\u0061\\u0062\\u0063\")"; // "abc" - assertEquals(fact2, RuleParser.parseLiteral(input)); + final String input = "p(\"\\u0061\\u0062\\u0063\")"; // "abc" + assertEquals(this.fact2, RuleParser.parseLiteral(input)); } @Test public void testUnicodeUri() throws ParsingException { - String input = "@base . @prefix ex: . ex:\\u0073(c) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final String input = "@base . @prefix ex: . ex:\\u0073(c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testPrefixedLiteral() throws ParsingException { - String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . 
" + "p(\"abc\"^^xsd:string) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact2), statements); + final String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . " + "p(\"abc\"^^xsd:string) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact2), statements); } @Test public void testLangStringLiteral() throws ParsingException { - String input = "p(\"abc\"@en-gb)"; - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final String input = "p(\"abc\"@en-gb)"; + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeLanguageStringConstant("abc", "en-gb")); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void testLineComments() throws ParsingException { - String input = "@prefix ex: . % comment \n" + "%@prefix ex: \n" + final String input = "@prefix ex: . % comment \n" + "%@prefix ex: \n" + " ex:s(ex:c) . % comment \n"; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testPositiveLiteral() throws ParsingException { - String input = "(?X,)"; - Literal literal = RuleParser.parsePositiveLiteral(input); - assertEquals(atom1, literal); + final String input = "(?X,)"; + final Literal literal = RuleParser.parsePositiveLiteral(input); + assertEquals(this.atom1, literal); } @Test(expected = ParsingException.class) public void testPositiveLiteralError() throws ParsingException { - String input = "~ (?X,)"; + final String input = "~ (?X,)"; RuleParser.parsePositiveLiteral(input); } @Test public void testLiteral() throws ParsingException { - String input = "~ (?X,)"; - Literal literal = RuleParser.parseLiteral(input); - assertEquals(negAtom1, literal); + final String input = "~ (?X,)"; + final Literal literal = RuleParser.parseLiteral(input); + assertEquals(this.negAtom1, literal); } @Test(expected = ParsingException.class) public void tesLiteralError() throws ParsingException { - String input = "(?X, facts = result.getFacts(); + final String input = "(_:blank) ."; + final KnowledgeBase result = RuleParser.parse(input); + final List facts = result.getFacts(); assertEquals(1, facts.size()); - assertArgumentIsNamedNull(facts.get(0), 1); + this.assertArgumentIsNamedNull(facts.get(0), 1); } @Test public void parseTerm_NamedNull_succeeds() throws ParsingException { - String input = "_:blank"; - Term result = RuleParser.parseTerm(input); - assertUuid(result.getName()); + final String input = "_:blank"; + final Term result = RuleParser.parseTerm(input); + this.assertUuid(result.getName()); } @Test public void parseTerm_NamedNullInHead_succeeds() throws ParsingException { - String input = "_:blank"; - Term result = RuleParser.parseTerm(input, FormulaContext.HEAD); - assertUuid(result.getName()); + final String input = "_:blank"; + final Term result = RuleParser.parseTerm(input, FormulaContext.HEAD); + this.assertUuid(result.getName()); } @Test(expected = ParsingException.class) public void parseTerm_NamedNullInBodyContext_throws() throws ParsingException { - String input = "_:blank"; + final String input = "_:blank"; RuleParser.parseTerm(input, FormulaContext.BODY); } @Test(expected = ParsingException.class) public void testBParsingExceptione() throws 
ParsingException { - String input = "_:(a) ."; + final String input = "_:(a) ."; RuleParser.parse(input); } @@ -440,24 +441,24 @@ public void testNonIriTypeInDatatypeLiteral() throws ParsingException { public void testIriTypeInDatatypeLiteral() throws ParsingException { final String iri = "whatever"; final String input = "P(\"a\"^^<" + iri + ">)"; - Literal literal = RuleParser.parseLiteral(input); - DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; + final Literal literal = RuleParser.parseLiteral(input); + final DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; assertEquals(iri, result.getDatatype()); } @Test public void predicateRelativeNumericIRITest() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("<1.e1>(a)."); // 1.e1 == "10"^^xsd:double - Fact f2 = Expressions.makeFact("1.e1", a); + final AbstractConstantImpl a = new AbstractConstantImpl("a"); + final Fact f = RuleParser.parseFact("<1.e1>(a)."); // 1.e1 == "10"^^xsd:double + final Fact f2 = Expressions.makeFact("1.e1", a); assertEquals(f, f2); } @Test public void predicateAbsoluteIRITest() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("(a)."); - Fact f2 = Expressions.makeFact("a:b", a); + final AbstractConstantImpl a = new AbstractConstantImpl("a"); + final Fact f = RuleParser.parseFact("(a)."); + final Fact f2 = Expressions.makeFact("a:b", a); assertEquals(f, f2); } @@ -479,124 +480,124 @@ public void parse_absoluteIrisInRule_succeeds() throws ParsingException { @Test public void testCustomDatatype() throws ParsingException { final String typename = "http://example.org/#test"; - DatatypeConstant constant = Expressions.makeDatatypeConstant("test", typename); - DatatypeConstantHandler handler = mock(DatatypeConstantHandler.class); - ParserConfiguration parserConfiguration = new ParserConfiguration(); + final DatatypeConstant constant = Expressions.makeDatatypeConstant("test", typename); + final DatatypeConstantHandler handler = mock(DatatypeConstantHandler.class); + final ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDatatype(typename, handler); doReturn(constant).when(handler).createConstant(ArgumentMatchers.eq("hello, world")); - String input = "p(\"hello, world\"^^<" + typename + ">)"; - Literal literal = RuleParser.parseLiteral(input, parserConfiguration); - DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; + final String input = "p(\"hello, world\"^^<" + typename + ">)"; + final Literal literal = RuleParser.parseLiteral(input, parserConfiguration); + final DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; assertEquals(constant, result); } @Test public void parse_importStatement_succeeds() throws ParsingException { - String input = "@import \"src/test/resources/facts.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact2); - List result = knowledgeBase.getFacts(); + final String input = "@import \"src/test/resources/facts.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact2); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_relativeImportStatement_succeeds() throws ParsingException { - String input = "@base . 
@import-relative \"src/test/resources/facts.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact3); - List result = knowledgeBase.getFacts(); + final String input = "@base . @import-relative \"src/test/resources/facts.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact3); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_importStatement_relativeImport_succeeds() throws ParsingException { - String input = "@import \"src/test/resources/subdir/sibling.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact4, fact5); - List result = knowledgeBase.getFacts(); + final String input = "@import \"src/test/resources/subdir/sibling.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact4, this.fact5); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_importStatement_relativeParentImport_succeeds() throws ParsingException { - String input = "@import \"src/test/resources/subdir/parent.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact2); - List result = knowledgeBase.getFacts(); + final String input = "@import \"src/test/resources/subdir/parent.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact2); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_relativeImportStatement_relativeImport_succeeds() throws ParsingException { - String input = "@base . @import-relative \"src/test/resources/subdir/sibling.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact4, fact5); - List result = knowledgeBase.getFacts(); + final String input = "@base . @import-relative \"src/test/resources/subdir/sibling.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact4, this.fact5); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_relativeImportStatement_relativeParentImport_succeeds() throws ParsingException { - String input = "@base . @import-relative \"src/test/resources/subdir/parent.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact2); - List result = knowledgeBase.getFacts(); + final String input = "@base . @import-relative \"src/test/resources/subdir/parent.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact2); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_import_renamesNamedNulls() throws ParsingException { - String input = "p(_:blank) . @import \"src/test/resources/blank.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List facts = knowledgeBase.getFacts(); + final String input = "p(_:blank) . 
@import \"src/test/resources/blank.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List facts = knowledgeBase.getFacts(); assertEquals(2, facts.size()); - Fact fact1 = facts.get(0); - Fact fact2 = facts.get(1); + final Fact fact1 = facts.get(0); + final Fact fact2 = facts.get(1); assertNotEquals(fact1, fact2); - assertArgumentIsNamedNull(fact1, 1); - assertArgumentIsNamedNull(fact2, 1); + this.assertArgumentIsNamedNull(fact1, 1); + this.assertArgumentIsNamedNull(fact2, 1); } @Test public void parse_reusedNamedNulls_identical() throws ParsingException { - String input = "p(_:blank) . q(_:blank) . p(_:other) ."; + final String input = "p(_:blank) . q(_:blank) . p(_:other) ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List facts = knowledgeBase.getFacts(); + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List facts = knowledgeBase.getFacts(); assertEquals(3, facts.size()); - Fact fact1 = facts.get(0); - Fact fact2 = facts.get(1); - Fact fact3 = facts.get(2); + final Fact fact1 = facts.get(0); + final Fact fact2 = facts.get(1); + final Fact fact3 = facts.get(2); assertEquals(fact1.getArguments().get(0), fact2.getArguments().get(0)); assertNotEquals(fact1.getArguments().get(0), fact3.getArguments().get(0)); - assertArgumentIsNamedNull(fact1, 1); - assertArgumentIsNamedNull(fact2, 1); - assertArgumentIsNamedNull(fact3, 1); + this.assertArgumentIsNamedNull(fact1, 1); + this.assertArgumentIsNamedNull(fact2, 1); + this.assertArgumentIsNamedNull(fact3, 1); } @Test public void parseInto_duplicateImportStatements_succeeds() throws ParsingException { - String input = "@import \"src/test/resources/facts.rls\" . "; - KnowledgeBase knowledgeBase = RuleParser.parse(input); + final String input = "@import \"src/test/resources/facts.rls\" . "; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); } @Test public void parseInto_duplicateRelativeImportStatements_succeeds() throws ParsingException { - String input = "@import \"src/test/resources/facts.rls\" . @import-relative \"src/test/resources/facts.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); + final String input = "@import \"src/test/resources/facts.rls\" . @import-relative \"src/test/resources/facts.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); } @Test public void parseInto_relativeImportRedeclaringBase_succeeds() throws ParsingException { - String input = "@base . @import-relative \"src/test/resources/base.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact3); - List result = knowledgeBase.getFacts(); + final String input = "@base . @import-relative \"src/test/resources/base.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact3); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } } From 44766c5f304e6a8d28ff506d3ceb20fff5a81b90 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 23 May 2021 13:37:04 +0200 Subject: [PATCH 0884/1003] update dependency version to VLog --- rulewerk-vlog/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 798211aea..a4a504c49 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -17,7 +17,7 @@ Bindings for the VLog reasoner backend. 
- 1.3.4 + 1.3.5 vlog-java From 02d0a7b9f08d56aa86e8e18ad96772933a915fca Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 23 May 2021 13:49:30 +0200 Subject: [PATCH 0885/1003] Update RELEASE-NOTES.md --- RELEASE-NOTES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index faf83bafe..58850e818 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -8,6 +8,7 @@ Bugfixes: * Encoding of RDF strings corrected to make sure VLog succeeds joining on strings * Fixed handling of trident databases that are not a direct child of the current working directory * Fixed encoding of language-tagged strings that are used in Rulewerk facts, which had caused an exception +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now Rulewerk v0.7.0 --------------- From 9971937f7fbfd97ad917cc2a1cc2aaedc95391e3 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 23 May 2021 13:57:42 +0200 Subject: [PATCH 0886/1003] Update README.md Describe rulewerk-integrationtests test module --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 5f6b233a4..fa2c78a23 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,8 @@ You need to use Java 1.8 or above. Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. +Test module **rulewerk-integrationtests** contains integration tests that verify the correctness of the backend reasoners for various complex reasoning problems. + The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps. * Delete (if existing) previous local builds (`local_builds` directory). 
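For orientation, the module descriptions above can be exercised with a minimal usage sketch of the 0.8.0 API. This is not part of the patch series; the class and package names (in particular `org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner`) are assumptions based on the module layout shown in these patches and may need adjusting to the exact release.

```java
import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.parser.RuleParser;
// Package name assumed from the rulewerk-vlog module layout.
import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;

public class MinimalRulewerkExample {
	public static void main(final String[] args) throws Exception {
		// Parse a small knowledge base: one fact and one rule.
		final KnowledgeBase kb = RuleParser.parse("p(a) . q(?X) :- p(?X) .");

		// Materialise with the VLog backend and query the derived predicate.
		try (Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.reason();
			final PositiveLiteral query = Expressions.makePositiveLiteral("q",
					Expressions.makeUniversalVariable("X"));
			try (QueryResultIterator answers = reasoner.answerQuery(query, true)) {
				answers.forEachRemaining(answer -> System.out.println(answer.getTerms()));
			}
		}
	}
}
```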
From 588125648017aeca882ed1f2dc5f9ad76c354cba Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 23 May 2021 14:01:19 +0200 Subject: [PATCH 0887/1003] update to release version 0.8.0 --- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-commands/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-integrationtests/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 12 files changed, 12 insertions(+), 12 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index be85d779e..8e80c639c 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 coverage diff --git a/pom.xml b/pom.xml index 7ae152180..c235cff02 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index 695e9a6a4..a613d4e9a 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-client diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml index 773e18561..461e18428 100644 --- a/rulewerk-commands/pom.xml +++ b/rulewerk-commands/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-commands diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 3ab864a66..30fa5685a 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 341378aec..e5caee14f 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index 13631b3cb..b86c3e8c5 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-graal diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml index 91bd919e3..ca7bcee5f 100644 --- a/rulewerk-integrationtests/pom.xml +++ b/rulewerk-integrationtests/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-integrationtests diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index be81cdf49..587981a15 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index e495e5247..e73738aec 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-parser diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index e64742507..178cba9ae 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index a4a504c49..ab257bf3c 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 
0.8.0-SNAPSHOT + 0.8.0 rulewerk-vlog From 9dd07d0484673d3210fa7627c7fb9dc126e1d397 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 May 2021 16:26:15 +0200 Subject: [PATCH 0888/1003] update to snapshot version --- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-commands/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-integrationtests/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 12 files changed, 12 insertions(+), 12 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 8e80c639c..5d59b5e78 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT coverage diff --git a/pom.xml b/pom.xml index c235cff02..bb8f5d302 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index a613d4e9a..079f43e32 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-client diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml index 461e18428..436c9f7a8 100644 --- a/rulewerk-commands/pom.xml +++ b/rulewerk-commands/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-commands diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 30fa5685a..bb19807a6 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index e5caee14f..5daeac2c1 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index b86c3e8c5..41467c8d5 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-graal diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml index ca7bcee5f..6202b8896 100644 --- a/rulewerk-integrationtests/pom.xml +++ b/rulewerk-integrationtests/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-integrationtests diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index 587981a15..f4c4cbff9 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index e73738aec..982e14f02 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-parser diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index 178cba9ae..9c93045bc 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index ab257bf3c..4c0837a50 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ 
org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-vlog From ef0dd26bb348bb9cdd13c375c9349213def9866f Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 May 2021 16:27:45 +0200 Subject: [PATCH 0889/1003] Update README.md update latest release version --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index fa2c78a23..884fa89ed 100644 --- a/README.md +++ b/README.md @@ -10,13 +10,13 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of Rulewerk is version 0.7.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of Rulewerk is version 0.8.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` org.semanticweb.rulewerk rulewerk-core - 0.7.0 + 0.8.0 ``` From 9d739549af1c32dd5fe8361866d6aba4a48f06f1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 May 2021 16:55:49 +0200 Subject: [PATCH 0890/1003] fix javadoc errors --- .../rulewerk/client/picocli/Main.java | 8 +- .../rulewerk/core/model/api/Term.java | 3 +- .../core/model/implementation/Serializer.java | 219 +++++++++--------- .../rulewerk/core/reasoner/Reasoner.java | 20 +- .../DataSourceConfigurationVisitor.java | 6 +- .../implementation/Skolemization.java | 36 +-- .../rulewerk/rdf/RdfModelConverter.java | 44 ++-- 7 files changed, 169 insertions(+), 167 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index f8b59bb0a..0193bc7da 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -52,12 +52,12 @@ public class Main { * Launches the client application for Rulewerk. The functionality depends on * the given command-line args ({@code args}): *
- * <li>empty args ("") or argument "shell"</li> <li>launch an
- * interactive shell.
+ * <li>empty args ("") or argument "shell" launch an interactive
+ * shell.</li>
 * <li>argument "materialize" can be used with different options to complete
 * several materialization and querying tasks from the command line.</li>
 * </ul>
 * help
  • + * * * @param args * @@ -92,7 +92,7 @@ static void displayHelp(final String[] args, final PrintStream printStream) { /** * Configures {@link Logger} settings. Messages are logged to the console. Log - * level is set to {@link Level.FATAL}. + * level is set to {@link Level#FATAL}. */ public static void configureLogging() { // Create the appender that will write log messages to the console. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java index 3bbabcfe4..bb0be440d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java @@ -65,8 +65,9 @@ default boolean isVariable() { /** * Accept a {@link TermVisitor} and return its output. - * + * * @param termVisitor the TermVisitor + * @param type associated to the given TermVisitor * @return output of the visitor */ T accept(TermVisitor termVisitor); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 741aba0a5..258d0f7f9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -40,6 +40,7 @@ import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; import org.semanticweb.rulewerk.core.model.api.Literal; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Rule; @@ -70,7 +71,7 @@ public class Serializer { */ public static final Function identityIriSerializer = new Function() { @Override - public String apply(String iri) { + public String apply(final String iri) { if (iri.contains(":") || !iri.matches(AbstractPrefixDeclarationRegistry.REGEXP_LOCNAME)) { return "<" + iri + ">"; } else { @@ -103,13 +104,13 @@ private class RuntimeIoException extends RuntimeException { private static final long serialVersionUID = 1L; final IOException cause; - public RuntimeIoException(IOException cause) { + public RuntimeIoException(final IOException cause) { super(cause); this.cause = cause; } public IOException getIoException() { - return cause; + return this.cause; } } @@ -122,60 +123,60 @@ public IOException getIoException() { private class SerializerTermVisitor implements TermVisitor { @Override - public Void visit(AbstractConstant term) { + public Void visit(final AbstractConstant term) { try { Serializer.this.writeAbstractConstant(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(DatatypeConstant term) { + public Void visit(final DatatypeConstant term) { try { Serializer.this.writeDatatypeConstant(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(LanguageStringConstant term) { + public Void visit(final LanguageStringConstant term) { try { Serializer.this.writeLanguageStringConstant(term); - } catch (IOException e) { + } catch (final IOException e) { throw new 
RuntimeIoException(e); } return null; } @Override - public Void visit(UniversalVariable term) { + public Void visit(final UniversalVariable term) { try { Serializer.this.writeUniversalVariable(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(ExistentialVariable term) { + public Void visit(final ExistentialVariable term) { try { Serializer.this.writeExistentialVariable(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(NamedNull term) { + public Void visit(final NamedNull term) { try { Serializer.this.writeNamedNull(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; @@ -192,30 +193,30 @@ public Void visit(NamedNull term) { private class SerializerStatementVisitor implements StatementVisitor { @Override - public Void visit(Fact statement) { + public Void visit(final Fact statement) { try { Serializer.this.writeFact(statement); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(Rule statement) { + public Void visit(final Rule statement) { try { Serializer.this.writeRule(statement); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(DataSourceDeclaration statement) { + public Void visit(final DataSourceDeclaration statement) { try { Serializer.this.writeDataSourceDeclaration(statement); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; @@ -252,7 +253,7 @@ public Serializer(final Writer writer) { * @param writer the object used to write serializations * @param prefixDeclarationRegistry the object used to abbreviate IRIs */ - public Serializer(final Writer writer, PrefixDeclarationRegistry prefixDeclarationRegistry) { + public Serializer(final Writer writer, final PrefixDeclarationRegistry prefixDeclarationRegistry) { this(writer, (string) -> { return prefixDeclarationRegistry.unresolveAbsoluteIri(string, true); }); @@ -261,13 +262,13 @@ public Serializer(final Writer writer, PrefixDeclarationRegistry prefixDeclarati /** * Writes a serialization of the given {@link Statement}. 
* - * @param term a {@link Statement} + * @param statement a {@link Statement} to serialize * @throws IOException */ - public void writeStatement(Statement statement) throws IOException { + public void writeStatement(final Statement statement) throws IOException { try { statement.accept(this.serializerStatementVisitor); - } catch (Serializer.RuntimeIoException e) { + } catch (final Serializer.RuntimeIoException e) { throw e.getIoException(); } } @@ -278,9 +279,9 @@ public void writeStatement(Statement statement) throws IOException { * @param fact a {@link Fact} * @throws IOException */ - public void writeFact(Fact fact) throws IOException { - writeLiteral(fact); - writer.write(STATEMENT_END); + public void writeFact(final Fact fact) throws IOException { + this.writeLiteral(fact); + this.writer.write(STATEMENT_END); } /** @@ -289,9 +290,9 @@ public void writeFact(Fact fact) throws IOException { * @param rule a {@link Rule} * @throws IOException */ - public void writeRule(Rule rule) throws IOException { - writeRuleNoStatment(rule); - writer.write(STATEMENT_END); + public void writeRule(final Rule rule) throws IOException { + this.writeRuleNoStatment(rule); + this.writer.write(STATEMENT_END); } /** @@ -300,10 +301,10 @@ public void writeRule(Rule rule) throws IOException { * @param rule a {@link Rule} * @throws IOException */ - private void writeRuleNoStatment(Rule rule) throws IOException { - writeLiteralConjunction(rule.getHead()); - writer.write(" :- "); - writeLiteralConjunction(rule.getBody()); + private void writeRuleNoStatment(final Rule rule) throws IOException { + this.writeLiteralConjunction(rule.getHead()); + this.writer.write(" :- "); + this.writeLiteralConjunction(rule.getBody()); } /** @@ -312,12 +313,12 @@ private void writeRuleNoStatment(Rule rule) throws IOException { * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @throws IOException */ - public void writeDataSourceDeclaration(DataSourceDeclaration dataSourceDeclaration) throws IOException { - writer.write("@source "); - writePredicate(dataSourceDeclaration.getPredicate()); - writer.write(": "); - writeLiteral(dataSourceDeclaration.getDataSource().getDeclarationFact()); - writer.write(STATEMENT_END); + public void writeDataSourceDeclaration(final DataSourceDeclaration dataSourceDeclaration) throws IOException { + this.writer.write("@source "); + this.writePredicate(dataSourceDeclaration.getPredicate()); + this.writer.write(": "); + this.writeLiteral(dataSourceDeclaration.getDataSource().getDeclarationFact()); + this.writer.write(STATEMENT_END); } /** @@ -326,11 +327,11 @@ public void writeDataSourceDeclaration(DataSourceDeclaration dataSourceDeclarati * @param literal a {@link Literal} * @throws IOException */ - public void writeLiteral(Literal literal) throws IOException { + public void writeLiteral(final Literal literal) throws IOException { if (literal.isNegated()) { - writer.write("~"); + this.writer.write("~"); } - writePositiveLiteral(literal.getPredicate(), literal.getArguments()); + this.writePositiveLiteral(literal.getPredicate(), literal.getArguments()); } /** @@ -341,21 +342,21 @@ public void writeLiteral(Literal literal) throws IOException { * @param arguments a list of {@link Term} arguments * @throws IOException */ - public void writePositiveLiteral(Predicate predicate, List arguments) throws IOException { - writer.write(getIri(predicate.getName())); - writer.write("("); + public void writePositiveLiteral(final Predicate predicate, final List arguments) throws IOException { + 
this.writer.write(this.getIri(predicate.getName())); + this.writer.write("("); boolean first = true; for (final Term term : arguments) { if (first) { first = false; } else { - writer.write(", "); + this.writer.write(", "); } - writeTerm(term); + this.writeTerm(term); } - writer.write(")"); + this.writer.write(")"); } /** @@ -371,9 +372,9 @@ public void writeLiteralConjunction(final Conjunction literal if (first) { first = false; } else { - writer.write(", "); + this.writer.write(", "); } - writeLiteral(literal); + this.writeLiteral(literal); } } @@ -384,11 +385,11 @@ public void writeLiteralConjunction(final Conjunction literal * @param predicate a {@link Predicate} * @throws IOException */ - public void writePredicate(Predicate predicate) throws IOException { - writer.write(getIri(predicate.getName())); - writer.write("["); - writer.write(String.valueOf(predicate.getArity())); - writer.write("]"); + public void writePredicate(final Predicate predicate) throws IOException { + this.writer.write(this.getIri(predicate.getName())); + this.writer.write("["); + this.writer.write(String.valueOf(predicate.getArity())); + this.writer.write("]"); } /** @@ -397,10 +398,10 @@ public void writePredicate(Predicate predicate) throws IOException { * @param term a {@link Term} * @throws IOException */ - public void writeTerm(Term term) throws IOException { + public void writeTerm(final Term term) throws IOException { try { term.accept(this.serializerTermVisitor); - } catch (Serializer.RuntimeIoException e) { + } catch (final Serializer.RuntimeIoException e) { throw e.getIoException(); } } @@ -411,8 +412,8 @@ public void writeTerm(Term term) throws IOException { * @param abstractConstant a {@link AbstractConstant} * @throws IOException */ - public void writeAbstractConstant(AbstractConstant abstractConstant) throws IOException { - writer.write(getIri(abstractConstant.getName())); + public void writeAbstractConstant(final AbstractConstant abstractConstant) throws IOException { + this.writer.write(this.getIri(abstractConstant.getName())); } /** @@ -421,13 +422,13 @@ public void writeAbstractConstant(AbstractConstant abstractConstant) throws IOEx * @param datatypeConstant a {@link DatatypeConstant} * @throws IOException */ - public void writeDatatypeConstant(DatatypeConstant datatypeConstant) throws IOException { + public void writeDatatypeConstant(final DatatypeConstant datatypeConstant) throws IOException { if (PrefixDeclarationRegistry.XSD_STRING.equals(datatypeConstant.getDatatype())) { - writer.write(getQuotedString(datatypeConstant.getLexicalValue())); + this.writer.write(this.getQuotedString(datatypeConstant.getLexicalValue())); } else if (PrefixDeclarationRegistry.XSD_INTEGER.equals(datatypeConstant.getDatatype())) { - writer.write(datatypeConstant.getLexicalValue()); + this.writer.write(datatypeConstant.getLexicalValue()); } else { - writeDatatypeConstantNoAbbreviations(datatypeConstant); + this.writeDatatypeConstantNoAbbreviations(datatypeConstant); } } @@ -438,10 +439,10 @@ public void writeDatatypeConstant(DatatypeConstant datatypeConstant) throws IOEx * @param datatypeConstant a {@link DatatypeConstant} * @throws IOException */ - public void writeDatatypeConstantNoAbbreviations(DatatypeConstant datatypeConstant) throws IOException { - writer.write(getQuotedString(datatypeConstant.getLexicalValue())); - writer.write("^^"); - writer.write(getIri(datatypeConstant.getDatatype())); + public void writeDatatypeConstantNoAbbreviations(final DatatypeConstant datatypeConstant) throws IOException { + 
this.writer.write(this.getQuotedString(datatypeConstant.getLexicalValue())); + this.writer.write("^^"); + this.writer.write(this.getIri(datatypeConstant.getDatatype())); } /** @@ -450,9 +451,9 @@ public void writeDatatypeConstantNoAbbreviations(DatatypeConstant datatypeConsta * @param universalVariable a {@link UniversalVariable} * @throws IOException */ - public void writeUniversalVariable(UniversalVariable universalVariable) throws IOException { - writer.write("?"); - writer.write(universalVariable.getName()); + public void writeUniversalVariable(final UniversalVariable universalVariable) throws IOException { + this.writer.write("?"); + this.writer.write(universalVariable.getName()); } /** @@ -461,9 +462,9 @@ public void writeUniversalVariable(UniversalVariable universalVariable) throws I * @param existentialVariable a {@link ExistentialVariable} * @throws IOException */ - public void writeExistentialVariable(ExistentialVariable existentialVariable) throws IOException { - writer.write("!"); - writer.write(existentialVariable.getName()); + public void writeExistentialVariable(final ExistentialVariable existentialVariable) throws IOException { + this.writer.write("!"); + this.writer.write(existentialVariable.getName()); } /** @@ -472,9 +473,9 @@ public void writeExistentialVariable(ExistentialVariable existentialVariable) th * @param namedNull a {@link NamedNull} * @throws IOException */ - public void writeNamedNull(NamedNull namedNull) throws IOException { - writer.write("_:"); - writer.write(namedNull.getName()); + public void writeNamedNull(final NamedNull namedNull) throws IOException { + this.writer.write("_:"); + this.writer.write(namedNull.getName()); } /** @@ -485,29 +486,29 @@ public void writeNamedNull(NamedNull namedNull) throws IOException { * @throws IOException * @return true if anything has been written */ - public boolean writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) + public boolean writePrefixDeclarationRegistry(final PrefixDeclarationRegistry prefixDeclarationRegistry) throws IOException { boolean result = false; final String baseIri = prefixDeclarationRegistry.getBaseIri(); if (!PrefixDeclarationRegistry.EMPTY_BASE.contentEquals(baseIri)) { - writer.write("@base <"); - writer.write(baseIri); - writer.write(">"); - writer.write(STATEMENT_END); - writer.write("\n"); + this.writer.write("@base <"); + this.writer.write(baseIri); + this.writer.write(">"); + this.writer.write(STATEMENT_END); + this.writer.write("\n"); result = true; } - Iterator> prefixIterator = prefixDeclarationRegistry.iterator(); + final Iterator> prefixIterator = prefixDeclarationRegistry.iterator(); while (prefixIterator.hasNext()) { - Entry entry = prefixIterator.next(); - writer.write("@prefix "); - writer.write(entry.getKey()); - writer.write(" <"); - writer.write(entry.getValue()); - writer.write(">"); - writer.write(STATEMENT_END); - writer.write("\n"); + final Entry entry = prefixIterator.next(); + this.writer.write("@prefix "); + this.writer.write(entry.getKey()); + this.writer.write(" <"); + this.writer.write(entry.getValue()); + this.writer.write(">"); + this.writer.write(STATEMENT_END); + this.writer.write("\n"); result = true; } return result; @@ -519,10 +520,10 @@ public boolean writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDe * @param languageStringConstant a {@link LanguageStringConstant} * @throws IOException */ - public void writeLanguageStringConstant(LanguageStringConstant languageStringConstant) throws IOException { - 
writer.write(getQuotedString(languageStringConstant.getString())); - writer.write("@"); - writer.write(languageStringConstant.getLanguageTag()); + public void writeLanguageStringConstant(final LanguageStringConstant languageStringConstant) throws IOException { + this.writer.write(this.getQuotedString(languageStringConstant.getString())); + this.writer.write("@"); + this.writer.write(languageStringConstant.getLanguageTag()); } /** @@ -531,21 +532,21 @@ public void writeLanguageStringConstant(LanguageStringConstant languageStringCon * @param command a {@link Command} * @throws IOException */ - public void writeCommand(Command command) throws IOException { - writer.write("@"); - writer.write(command.getName()); + public void writeCommand(final Command command) throws IOException { + this.writer.write("@"); + this.writer.write(command.getName()); - for (Argument argument : command.getArguments()) { - writer.write(" "); + for (final Argument argument : command.getArguments()) { + this.writer.write(" "); if (argument.fromRule().isPresent()) { - writeRuleNoStatment(argument.fromRule().get()); + this.writeRuleNoStatment(argument.fromRule().get()); } else if (argument.fromPositiveLiteral().isPresent()) { - writeLiteral(argument.fromPositiveLiteral().get()); + this.writeLiteral(argument.fromPositiveLiteral().get()); } else { - writeTerm(argument.fromTerm().get()); + this.writeTerm(argument.fromTerm().get()); } } - writer.write(STATEMENT_END); + this.writer.write(STATEMENT_END); } /** @@ -555,12 +556,12 @@ public void writeCommand(Command command) throws IOException { * a string * @return serialization string */ - public static String getSerialization(SerializationWriter writeAction) { + public static String getSerialization(final SerializationWriter writeAction) { final StringWriter stringWriter = new StringWriter(); final Serializer serializer = new Serializer(stringWriter); try { writeAction.write(serializer); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeException("StringWriter should never throw an IOException."); } return stringWriter.toString(); @@ -588,6 +589,6 @@ private String getQuotedString(final String string) { } private String getIri(final String string) { - return iriTransformer.apply(string); + return this.iriTransformer.apply(string); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index d94ea7128..156a03d23 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -117,10 +117,10 @@ public interface InferenceAction { * @return the correctness of the inferences, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. */ - default Correctness unsafeForEachInference(BiConsumer> action) { + default Correctness unsafeForEachInference(final BiConsumer> action) { try { - return forEachInference(action::accept); - } catch (IOException e) { + return this.forEachInference(action::accept); + } catch (final IOException e) { throw new RulewerkRuntimeException(e); } } @@ -129,18 +129,18 @@ default Correctness unsafeForEachInference(BiConsumer> act * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. * - * @param stream an OutputStream for the facts to be written to. 
+ * @param writer the {@link Writer} used to write inferences. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException */ - default Correctness writeInferences(Writer writer) throws IOException { - final PrefixDeclarationRegistry prefixDeclarationRegistry = getKnowledgeBase().getPrefixDeclarationRegistry(); + default Correctness writeInferences(final Writer writer) throws IOException { + final PrefixDeclarationRegistry prefixDeclarationRegistry = this.getKnowledgeBase().getPrefixDeclarationRegistry(); final Serializer serializer = new Serializer(writer, prefixDeclarationRegistry); serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); - return forEachInference((predicate, termList) -> { + return this.forEachInference((predicate, termList) -> { serializer.writePositiveLiteral(predicate, termList); writer.write(" .\n"); }); @@ -153,8 +153,8 @@ default Correctness writeInferences(Writer writer) throws IOException { * inferences. */ default Stream getInferences() { - Stream.Builder builder = Stream.builder(); - unsafeForEachInference((predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); + final Stream.Builder builder = Stream.builder(); + this.unsafeForEachInference((predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); return builder.build(); } @@ -180,7 +180,7 @@ default Stream getInferences() { * method will disappear. */ @Deprecated - default Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { + default Correctness writeInferences(final String filePath) throws FileNotFoundException, IOException { try (Writer writer = new OutputStreamWriter(new FileOutputStream(filePath), StandardCharsets.UTF_8)) { return this.writeInferences(writer); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java index 91c78b4e1..cb227662c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java @@ -33,7 +33,7 @@ public interface DataSourceConfigurationVisitor { * Configure the reasoner for a {@link CsvFileDataSource}. * * @param dataSource the data source to configure - * @throws IOexception when an IO error occurs during configuration + * @throws IOException when an IO error occurs during configuration */ public void visit(CsvFileDataSource dataSource) throws IOException; @@ -41,7 +41,7 @@ public interface DataSourceConfigurationVisitor { * Configure the reasoner for a {@link RdfFileDataSource}. * * @param dataSource the data source to configure - * @throws IOexception when an IO error occurs during configuration + * @throws IOException when an IO error occurs during configuration */ public void visit(RdfFileDataSource dataSource) throws IOException; @@ -49,7 +49,7 @@ public interface DataSourceConfigurationVisitor { * Configure the reasoner for a {@link TridentDataSource}. 
* * @param dataSource the data source to configure - * @throws IOexception when an IO error occurs during configuration + * @throws IOException when an IO error occurs during configuration */ public void visit(TridentDataSource dataSource) throws IOException; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index 54080781b..4f03dde77 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -66,8 +66,8 @@ public class Skolemization { * @return a {@link RenamedNamedNull} with a new name that is specific to this * instance and {@code name}. */ - public RenamedNamedNull getRenamedNamedNull(String name) { - return new RenamedNamedNull(getFreshName(name)); + public RenamedNamedNull getRenamedNamedNull(final String name) { + return new RenamedNamedNull(this.getFreshName(name)); } /** @@ -80,8 +80,8 @@ public RenamedNamedNull getRenamedNamedNull(String name) { * @return a {@link AbstractConstant} with an IRI that is specific to this * instance and {@code name}. */ - public AbstractConstant getSkolemConstant(String name, TermFactory termFactory) { - return termFactory.makeAbstractConstant(getSkolemConstantName(name)); + public AbstractConstant getSkolemConstant(final String name, final TermFactory termFactory) { + return termFactory.makeAbstractConstant(this.getSkolemConstantName(name)); } /** @@ -95,8 +95,8 @@ public AbstractConstant getSkolemConstant(String name, TermFactory termFactory) * @return a {@link AbstractConstant} with an IRI that is specific to this * instance and {@code namedNull}. */ - public AbstractConstant getSkolemConstant(NamedNull namedNull, TermFactory termFactory) { - return termFactory.makeAbstractConstant(getSkolemConstantName(namedNull)); + public AbstractConstant getSkolemConstant(final NamedNull namedNull, final TermFactory termFactory) { + return termFactory.makeAbstractConstant(this.getSkolemConstantName(namedNull)); } @@ -108,8 +108,8 @@ public AbstractConstant getSkolemConstant(NamedNull namedNull, TermFactory termF * other string for which to create a unique renaming) * @return string that is an IRI for a skolem constant */ - public String getSkolemConstantName(String name) { - return getSkolemConstantNameFromUniqueName(getFreshName(name).toString()); + public String getSkolemConstantName(final String name) { + return this.getSkolemConstantNameFromUniqueName(this.getFreshName(name).toString()); } /** @@ -117,15 +117,15 @@ public String getSkolemConstantName(String name) { * named {@link NamedNull}. The method ensures that a new unique name is * generated unless the given object is already a {@link RenamedNamedNull}. 
* - * @param name the name of the {@link NamedNull} to be renamed here (or any - * other string for which to create a unique renaming) + * @param namedNull the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) * @return string that is an IRI for a skolem constant */ - public String getSkolemConstantName(NamedNull namedNull) { + public String getSkolemConstantName(final NamedNull namedNull) { if (namedNull instanceof RenamedNamedNull) { - return getSkolemConstantNameFromUniqueName(namedNull.getName()); + return this.getSkolemConstantNameFromUniqueName(namedNull.getName()); } else { - return getSkolemConstantName(namedNull.getName()); + return this.getSkolemConstantName(namedNull.getName()); } } @@ -135,7 +135,7 @@ public String getSkolemConstantName(NamedNull namedNull) { * @param name local id of skolem constant * @return IRI string */ - private String getSkolemConstantNameFromUniqueName(String name) { + private String getSkolemConstantNameFromUniqueName(final String name) { return SKOLEM_IRI_PREFIX + SKOLEM_UUID_START + name; } @@ -147,10 +147,10 @@ private String getSkolemConstantNameFromUniqueName(String name) { * @param name the string to be renamed * @return a UUID for the new name */ - public UUID getFreshName(String name) { - byte[] nameBytes = name.getBytes(); - ByteArrayOutputStream stream = new ByteArrayOutputStream(); - stream.write(namedNullNamespace, 0, namedNullNamespace.length); + public UUID getFreshName(final String name) { + final byte[] nameBytes = name.getBytes(); + final ByteArrayOutputStream stream = new ByteArrayOutputStream(); + stream.write(this.namedNullNamespace, 0, this.namedNullNamespace.length); stream.write(nameBytes, 0, nameBytes.length); return UUID.nameUUIDFromBytes(stream.toByteArray()); } diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index 768cc945a..7bc936719 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -31,10 +31,10 @@ import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.model.Value; -import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; @@ -50,8 +50,8 @@ * given {@code rdfModel} into an {@link PositiveLiteral} of the form * {@code TRIPLE(subject, predicate, object)}. The ternary predicate used for * all literals generated from RDF triples is - * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE}. Subject, predicate and object - * {@link Value}s are converted to corresponding {@link Term}s: + * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE_NAME}. Subject, predicate and + * object {@link Value}s are converted to corresponding {@link Term}s: *